"advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bufstream 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"crates-io 0.1.0",
+ "crossbeam 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"curl 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.70 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tar 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"tempdir 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "threadpool 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)",
"url 0.2.37 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "crossbeam"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "curl"
version = "0.2.11"
"winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "threadpool"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "time"
version = "0.1.32"
[dependencies]
advapi32-sys = "0.1"
+crates-io = { path = "src/crates-io", version = "0.1" }
+crossbeam = "0.1"
curl = "0.2"
docopt = "0.6"
env_logger = "0.3"
log = "0.3"
num_cpus = "0.2"
regex = "0.1"
-crates-io = { path = "src/crates-io", version = "0.1" }
rustc-serialize = "0.3"
semver = "0.1"
tar = "0.3"
term = "0.2"
-threadpool = "0.1"
time = "0.1"
toml = "0.1"
url = "0.2"
use std::default::Default;
+use std::fmt;
use std::path::{PathBuf, Path};
use semver::Version;
pub rpath: bool,
pub test: bool,
pub doc: bool,
+ pub run_custom_build: bool,
}
#[derive(Default, Clone, Debug)]
pub test: Profile,
pub bench: Profile,
pub doc: Profile,
+ pub custom_build: Profile,
}
/// Informations about a binary, a library, an example, etc. that is part of the
}
}
+// Human-readable rendering of a `Target` for log/debug output.
+// `Lib` and `CustomBuild` render without a name; all other kinds
+// include the target's name in the output.
+impl fmt::Display for Target {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self.kind {
+            TargetKind::Lib(..) => write!(f, "Target(lib)"),
+            TargetKind::Bin => write!(f, "Target(bin: {})", self.name),
+            TargetKind::Test => write!(f, "Target(test: {})", self.name),
+            TargetKind::Bench => write!(f, "Target(bench: {})", self.name),
+            TargetKind::Example => write!(f, "Target(example: {})", self.name),
+            TargetKind::CustomBuild => write!(f, "Target(script)"),
+        }
+    }
+}
+
impl Profile {
pub fn default_dev() -> Profile {
Profile {
..Profile::default_dev()
}
}
+
+ pub fn default_custom_build() -> Profile {
+ Profile {
+ run_custom_build: true,
+ ..Profile::default_dev()
+ }
+ }
}
impl Default for Profile {
rpath: false,
test: false,
doc: false,
+ run_custom_build: false,
}
}
}
+
+// Compact rendering of what a `Profile` is for, used in log/debug output.
+// Branch order matters: `test` takes precedence over `doc`, which takes
+// precedence over `run_custom_build`; everything else is a plain build.
+impl fmt::Display for Profile {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if self.test {
+            write!(f, "Profile(test)")
+        } else if self.doc {
+            write!(f, "Profile(doc)")
+        } else if self.run_custom_build {
+            write!(f, "Profile(run)")
+        } else {
+            write!(f, "Profile(build)")
+        }
+
+    }
+}
#[cfg(test)] extern crate hamcrest;
#[macro_use] extern crate log;
extern crate crates_io as registry;
+extern crate crossbeam;
extern crate curl;
extern crate docopt;
extern crate filetime;
extern crate semver;
extern crate tar;
extern crate term;
-extern crate threadpool;
extern crate time;
extern crate toml;
extern crate url;
use core::{Package, PackageSet, Profiles, Profile};
use core::source::{Source, SourceMap};
use util::{CargoResult, human, ChainError, Config};
-use ops::{self, Layout, Context, BuildConfig, Kind};
+use ops::{self, Layout, Context, BuildConfig, Kind, Unit};
pub struct CleanOptions<'a> {
pub spec: &'a [String],
try!(rm_rf(&layout.fingerprint(&pkg)));
let profiles = [Profile::default_dev(), Profile::default_test()];
for profile in profiles.iter() {
- for filename in try!(cx.target_filenames(&pkg, target, profile,
- Kind::Target)).iter() {
+ let unit = Unit {
+ pkg: &pkg,
+ target: target,
+ profile: profile,
+ kind: Kind::Target,
+ };
+ for filename in try!(cx.target_filenames(&unit)).iter() {
try!(rm_rf(&layout.dest().join(&filename)));
try!(rm_rf(&layout.deps().join(&filename)));
}
pub libraries: HashMap<PackageId, Vec<(Target, PathBuf)>>,
/// An array of all tests created during this compilation.
- pub tests: Vec<(Package, Vec<(String, PathBuf)>)>,
+ pub tests: Vec<(Package, String, PathBuf)>,
/// An array of all binaries created.
pub binaries: Vec<PathBuf>,
/// Extra environment variables that were passed to compilations and should
/// be passed to future invocations of programs.
- pub extra_env: HashMap<String, String>,
+ pub extra_env: HashMap<PackageId, Vec<(String, String)>>,
pub to_doc_test: Vec<Package>,
}
/// See `process`.
- pub fn rustdoc_process(&self, pkg: &Package) -> CargoResult<CommandPrototype> {
+ pub fn rustdoc_process(&self, pkg: &Package)
+ -> CargoResult<CommandPrototype> {
self.process(CommandType::Rustdoc, pkg)
}
util::dylib_path_envvar()));
let mut cmd = try!(CommandPrototype::new(cmd, self.config));
cmd.env(util::dylib_path_envvar(), &search_path);
- for (k, v) in self.extra_env.iter() {
- cmd.env(k, v);
+ if let Some(env) = self.extra_env.get(pkg.package_id()) {
+ for &(ref k, ref v) in env {
+ cmd.env(k, v);
+ }
}
cmd.env("CARGO_MANIFEST_DIR", pkg.root())
-use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::{HashSet, HashMap};
use std::path::{Path, PathBuf};
use std::str;
use util::human;
use super::TargetConfig;
-use super::custom_build::BuildState;
+use super::custom_build::{BuildState, BuildScripts};
use super::fingerprint::Fingerprint;
use super::layout::{Layout, LayoutProxy};
use super::{Kind, Compilation, BuildConfig};
use super::{ProcessEngine, ExecEngine};
-use super::PackagesToBuild;
-#[derive(Debug, Clone, Copy)]
-pub enum Platform {
- Target,
- Plugin,
- PluginAndTarget,
+// One unit of compilation work: building `target` of `pkg` with `profile`
+// for platform `kind`. Derives `Copy`/`Eq`/`Hash` so it can serve as the
+// key of the `fingerprints`, `compiled`, and `build_scripts` maps in
+// `Context`, replacing the previous (PackageId, Target, Profile, Kind)
+// tuple keys.
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+pub struct Unit<'a> {
+    pub pkg: &'a Package,
+    pub target: &'a Target,
+    pub profile: &'a Profile,
+    pub kind: Kind,
}
pub struct Context<'a, 'cfg: 'a> {
pub compilation: Compilation<'cfg>,
pub build_state: Arc<BuildState>,
pub exec_engine: Arc<Box<ExecEngine>>,
- pub fingerprints: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind),
- Fingerprint>,
- pub compiled: HashSet<(&'a PackageId, &'a Target, &'a Profile)>,
+ pub fingerprints: HashMap<Unit<'a>, Fingerprint>,
+ pub compiled: HashSet<Unit<'a>>,
pub build_config: BuildConfig,
- pub build_scripts: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind),
- Vec<&'a PackageId>>,
+ pub build_scripts: HashMap<Unit<'a>, Arc<BuildScripts>>,
host: Layout,
target: Option<Layout>,
package_set: &'a PackageSet,
target_dylib: Option<(String, String)>,
target_exe: String,
- requirements: HashMap<(&'a PackageId, &'a str), Platform>,
profiles: &'a Profiles,
}
target_exe: target_exe,
host_dylib: host_dylib,
host_exe: host_exe,
- requirements: HashMap::new(),
compilation: Compilation::new(config),
build_state: Arc::new(BuildState::new(&build_config, deps)),
build_config: build_config,
/// Prepare this context, ensuring that all filesystem directories are in
/// place.
- pub fn prepare(&mut self, root: &Package,
- pkgs: &'a PackagesToBuild<'a>) -> CargoResult<()> {
+ pub fn prepare(&mut self, root: &Package) -> CargoResult<()> {
let _p = profile::start("preparing layout");
try!(self.host.prepare().chain_error(|| {
None => {}
}
- for &(pkg, ref targets) in pkgs {
- for &(target, profile) in targets {
- self.build_requirements(pkg, target, profile, Kind::from(target));
- }
- }
-
- let jobs = self.jobs();
- self.compilation.extra_env.insert("NUM_JOBS".to_string(),
- jobs.to_string());
self.compilation.root_output =
self.layout(root, Kind::Target).proxy().dest().to_path_buf();
self.compilation.deps_output =
return Ok(());
}
- fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target,
- profile: &Profile, kind: Kind) {
- let req = if kind == Kind::Host { Platform::Plugin } else { Platform::Target };
-
- match self.requirements.entry((pkg.package_id(), target.name())) {
- Occupied(mut entry) => match (*entry.get(), req) {
- (Platform::Plugin, Platform::Plugin) |
- (Platform::PluginAndTarget, Platform::Plugin) |
- (Platform::Target, Platform::Target) |
- (Platform::PluginAndTarget, Platform::Target) |
- (Platform::PluginAndTarget, Platform::PluginAndTarget) => return,
- _ => *entry.get_mut() = entry.get().combine(req),
- },
- Vacant(entry) => { entry.insert(req); }
- };
-
- for (pkg, dep, profile) in self.dep_targets(pkg, target, kind, profile) {
- self.build_requirements(pkg, dep, profile, kind.for_target(dep));
- }
-
- match pkg.targets().iter().find(|t| t.is_custom_build()) {
- Some(custom_build) => {
- let profile = self.build_script_profile(pkg.package_id());
- self.build_requirements(pkg, custom_build, profile, Kind::Host);
- }
- None => {}
- }
- }
-
- pub fn get_requirement(&self, pkg: &'a Package,
- target: &'a Target) -> Platform {
- let default = if target.for_host() {
- Platform::Plugin
- } else {
- Platform::Target
- };
- self.requirements.get(&(pkg.package_id(), target.name()))
- .map(|a| *a).unwrap_or(default)
- }
-
/// Returns the appropriate directory layout for either a plugin or not.
pub fn layout(&self, pkg: &Package, kind: Kind) -> LayoutProxy {
let primary = pkg.package_id() == self.resolve.root();
match kind {
Kind::Host => LayoutProxy::new(&self.host, primary),
- Kind::Target => LayoutProxy::new(self.target.as_ref()
- .unwrap_or(&self.host),
- primary),
+ Kind::Target => LayoutProxy::new(self.target.as_ref()
+ .unwrap_or(&self.host),
+ primary),
}
}
/// Returns the appropriate output directory for the specified package and
/// target.
- pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> PathBuf {
- let out_dir = self.layout(pkg, kind);
- if target.is_custom_build() {
- out_dir.build(pkg)
- } else if target.is_example() {
+ pub fn out_dir(&self, unit: &Unit) -> PathBuf {
+ let out_dir = self.layout(unit.pkg, unit.kind);
+ if unit.target.is_custom_build() {
+ out_dir.build(unit.pkg)
+ } else if unit.target.is_example() {
out_dir.examples().to_path_buf()
} else {
out_dir.root().to_path_buf()
}
/// Get the metadata for a target in a specific profile
- pub fn target_metadata(&self, pkg: &Package, target: &Target,
- profile: &Profile) -> Option<Metadata> {
- let metadata = target.metadata();
- if target.is_lib() && profile.test {
+ pub fn target_metadata(&self, unit: &Unit) -> Option<Metadata> {
+ let metadata = unit.target.metadata();
+ if unit.target.is_lib() && unit.profile.test {
// Libs and their tests are built in parallel, so we need to make
// sure that their metadata is different.
metadata.map(|m| m.clone()).map(|mut m| {
m.mix(&"test");
m
})
- } else if target.is_bin() && profile.test {
+ } else if unit.target.is_bin() && unit.profile.test {
// Make sure that the name of this test executable doesn't
// conflict with a library that has the same name and is
// being tested
- let mut metadata = pkg.generate_metadata();
- metadata.mix(&format!("bin-{}", target.name()));
+ let mut metadata = unit.pkg.generate_metadata();
+ metadata.mix(&format!("bin-{}", unit.target.name()));
Some(metadata)
- } else if pkg.package_id() == self.resolve.root() && !profile.test {
+ } else if unit.pkg.package_id() == self.resolve.root() &&
+ !unit.profile.test {
// If we're not building a unit test then the root package never
// needs any metadata as it's guaranteed to not conflict with any
// other output filenames. This means that we'll have predictable
}
/// Returns the file stem for a given target/profile combo
- pub fn file_stem(&self, pkg: &Package, target: &Target,
- profile: &Profile) -> String {
- match self.target_metadata(pkg, target, profile) {
- Some(ref metadata) => format!("{}{}", target.crate_name(),
+ pub fn file_stem(&self, unit: &Unit) -> String {
+ match self.target_metadata(unit) {
+ Some(ref metadata) => format!("{}{}", unit.target.crate_name(),
metadata.extra_filename),
- None if target.allows_underscores() => target.name().to_string(),
- None => target.crate_name().to_string(),
+ None if unit.target.allows_underscores() => {
+ unit.target.name().to_string()
+ }
+ None => unit.target.crate_name().to_string(),
}
}
/// Return the filenames that the given target for the given profile will
/// generate.
- pub fn target_filenames(&self, pkg: &Package, target: &Target,
- profile: &Profile, kind: Kind)
- -> CargoResult<Vec<String>> {
- let stem = self.file_stem(pkg, target, profile);
- let suffix = if target.for_host() {&self.host_exe} else {&self.target_exe};
+ pub fn target_filenames(&self, unit: &Unit) -> CargoResult<Vec<String>> {
+ let stem = self.file_stem(unit);
+ let suffix = if unit.target.for_host() {
+ &self.host_exe
+ } else {
+ &self.target_exe
+ };
let mut ret = Vec::new();
- match *target.kind() {
+ match *unit.target.kind() {
TargetKind::Example | TargetKind::Bin | TargetKind::CustomBuild |
TargetKind::Bench | TargetKind::Test => {
ret.push(format!("{}{}", stem, suffix));
}
- TargetKind::Lib(..) if profile.test => {
+ TargetKind::Lib(..) if unit.profile.test => {
ret.push(format!("{}{}", stem, suffix));
}
TargetKind::Lib(ref libs) => {
for lib in libs.iter() {
match *lib {
LibKind::Dylib => {
- if let Ok((prefix, suffix)) = self.dylib(kind) {
+ if let Ok((prefix, suffix)) = self.dylib(unit.kind) {
ret.push(format!("{}{}{}", prefix, stem, suffix));
}
}
/// For a package, return all targets which are registered as dependencies
/// for that package.
- pub fn dep_targets(&self, pkg: &Package, target: &Target, kind: Kind,
- profile: &Profile)
- -> Vec<(&'a Package, &'a Target, &'a Profile)> {
- if profile.doc {
- return self.doc_deps(pkg, target, kind);
+ pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
+ if unit.profile.run_custom_build {
+ return self.dep_run_custom_build(unit, false)
+ } else if unit.profile.doc {
+ return self.doc_deps(unit);
}
- let deps = match self.resolve.deps(pkg.package_id()) {
- None => return Vec::new(),
- Some(deps) => deps,
- };
+
+ let id = unit.pkg.package_id();
+ let deps = self.resolve.deps(id).into_iter().flat_map(|a| a);
let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| {
- pkg.dependencies().iter().filter(|d| {
+ unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name()
}).any(|d| {
// If this target is a build command, then we only want build
// dependencies, otherwise we want everything *other than* build
// dependencies.
- let is_correct_dep = target.is_custom_build() == d.is_build();
+ if unit.target.is_custom_build() != d.is_build() {
+ return false
+ }
// If this dependency is *not* a transitive dependency, then it
// only applies to test/example targets
- let is_actual_dep = d.is_transitive() ||
- target.is_test() ||
- target.is_example() ||
- profile.test;
+ if !d.is_transitive() && !unit.target.is_test() &&
+ !unit.target.is_example() && !unit.profile.test {
+ return false
+ }
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
- let is_platform_same = self.dep_platform_activated(d, kind);
+ if !self.dep_platform_activated(d, unit.kind) {
+ return false
+ }
// If the dependency is optional, then we're only activating it
// if the corresponding feature was activated
- let activated = !d.is_optional() ||
- self.resolve.features(pkg.package_id()).map(|f| {
- f.contains(d.name())
- }).unwrap_or(false);
+ if d.is_optional() {
+ match self.resolve.features(id) {
+ Some(f) if f.contains(d.name()) => {}
+ _ => return false,
+ }
+ }
- is_correct_dep && is_actual_dep && is_platform_same && activated
+ // If we've gotten past all that, then this dependency is
+ // actually used!
+ true
})
}).filter_map(|pkg| {
pkg.targets().iter().find(|t| t.is_lib()).map(|t| {
- (pkg, t, self.lib_profile(pkg.package_id()))
+ Unit {
+ pkg: pkg,
+ target: t,
+ profile: self.lib_profile(id),
+ kind: unit.kind.for_target(t),
+ }
})
}).collect::<Vec<_>>();
// If a target isn't actually a build script itself, then it depends on
// the build script if there is one.
- if target.is_custom_build() { return ret }
- let pkg = self.get_package(pkg.package_id());
- if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) {
- ret.push((pkg, t, self.build_script_profile(pkg.package_id())));
+ if unit.target.is_custom_build() {
+ return ret
}
+ ret.extend(self.build_script_if_run(unit, false));
// If this target is a binary, test, example, etc, then it depends on
// the library of the same package. The call to `resolve.deps` above
// didn't include `pkg` in the return values, so we need to special case
// it here and see if we need to push `(pkg, pkg_lib_target)`.
- if target.is_lib() { return ret }
- if let Some(t) = pkg.targets().iter().find(|t| t.linkable()) {
- ret.push((pkg, t, self.lib_profile(pkg.package_id())));
+ if unit.target.is_lib() {
+ return ret
}
+ ret.extend(self.maybe_lib(unit));
// Integration tests/benchmarks require binaries to be built
- if profile.test && (target.is_test() || target.is_bench()) {
- ret.extend(pkg.targets().iter().filter(|t| t.is_bin())
- .map(|t| (pkg, t, self.lib_profile(pkg.package_id()))));
+ if unit.profile.test &&
+ (unit.target.is_test() || unit.target.is_bench()) {
+ ret.extend(unit.pkg.targets().iter().filter(|t| t.is_bin()).map(|t| {
+ Unit {
+ pkg: unit.pkg,
+ target: t,
+ profile: self.lib_profile(id),
+ kind: unit.kind.for_target(t),
+ }
+ }));
}
return ret
}
+    /// Returns the dependencies necessary to *run* the build script for
+    /// `unit`.
+    ///
+    /// The returned units are the build scripts of linkable dependencies
+    /// that declare `links` (so their output is visible to this script),
+    /// plus the compilation of this package's own build script, which is
+    /// always built for the host. `include_overridden` is threaded through
+    /// to `build_script_if_run` to decide whether overridden build scripts
+    /// are still returned.
+    pub fn dep_run_custom_build(&self,
+                                unit: &Unit<'a>,
+                                include_overridden: bool) -> Vec<Unit<'a>> {
+        // Stand in a non-build-script target of the same package so that
+        // `dep_targets` yields this package's normal (non-build) dependency
+        // edges rather than build-dependencies.
+        let not_custom_build = unit.pkg.targets().iter().find(|t| {
+            !t.is_custom_build()
+        }).unwrap();
+        let tmp = Unit {
+            target: not_custom_build,
+            profile: &self.profiles.dev,
+            ..*unit
+        };
+        let mut ret = self.dep_targets(&tmp).iter().filter_map(|unit| {
+            if !unit.target.linkable() || unit.pkg.manifest().links().is_none() {
+                return None
+            }
+            self.build_script_if_run(unit, include_overridden)
+        }).collect::<Vec<_>>();
+        // This package's own build script is compiled on the host.
+        ret.push(Unit {
+            profile: self.build_script_profile(unit.pkg.package_id()),
+            kind: Kind::Host,
+            ..*unit
+        });
+        return ret
+    }
+
/// Returns the dependencies necessary to document a package
- fn doc_deps(&self, pkg: &Package, target: &Target, kind: Kind)
- -> Vec<(&'a Package, &'a Target, &'a Profile)> {
- let pkg = self.get_package(pkg.package_id());
- let deps = self.resolve.deps(pkg.package_id()).into_iter();
+ fn doc_deps(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
+ let deps = self.resolve.deps(unit.pkg.package_id()).into_iter();
let deps = deps.flat_map(|a| a).map(|id| {
self.get_package(id)
}).filter(|dep| {
- pkg.dependencies().iter().filter(|d| {
+ unit.pkg.dependencies().iter().filter(|d| {
d.name() == dep.name()
}).any(|dep| {
match dep.kind() {
- DepKind::Normal => self.dep_platform_activated(dep, kind),
+ DepKind::Normal => self.dep_platform_activated(dep,
+ unit.kind),
_ => false,
}
})
// the documentation of the library being built.
let mut ret = Vec::new();
for (dep, lib) in deps {
- ret.push((dep, lib, self.lib_profile(dep.package_id())));
+ ret.push(Unit {
+ pkg: dep,
+ target: lib,
+ profile: self.lib_profile(dep.package_id()),
+ kind: unit.kind.for_target(lib),
+ });
if self.build_config.doc_all {
- ret.push((dep, lib, &self.profiles.doc));
+ ret.push(Unit {
+ pkg: dep,
+ target: lib,
+ profile: &self.profiles.doc,
+ kind: unit.kind.for_target(lib),
+ });
}
}
// Be sure to build/run the build script for documented libraries as
- if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) {
- ret.push((pkg, t, self.build_script_profile(pkg.package_id())));
- }
+ ret.extend(self.build_script_if_run(unit, false));
// If we document a binary, we need the library available
- if target.is_bin() {
- if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) {
- ret.push((pkg, t, self.lib_profile(pkg.package_id())));
- }
+ if unit.target.is_bin() {
+ ret.extend(self.maybe_lib(unit));
}
return ret
}
+    /// Returns the build script for a package if that build script is actually
+    /// intended to be run for `kind` as part of this compilation.
+    ///
+    /// Build scripts are not run if they are overridden by some global
+    /// configuration. Passing `allow_overridden = true` returns the script
+    /// unit even when such an override exists.
+    fn build_script_if_run(&self, unit: &Unit<'a>,
+                           allow_overridden: bool) -> Option<Unit<'a>> {
+        let target = match unit.pkg.targets().iter().find(|t| t.is_custom_build()) {
+            Some(t) => t,
+            None => return None,
+        };
+        // An entry already present in `build_state.outputs` for this
+        // (package, kind) pair means the script's output was supplied up
+        // front by configuration (an override), so nothing needs to run.
+        let key = (unit.pkg.package_id().clone(), unit.kind);
+        if !allow_overridden &&
+           unit.pkg.manifest().links().is_some() &&
+           self.build_state.outputs.lock().unwrap().contains_key(&key) {
+            return None
+        }
+        Some(Unit {
+            pkg: unit.pkg,
+            target: target,
+            profile: &self.profiles.custom_build,
+            kind: unit.kind,
+        })
+    }
+
+    /// If `unit`'s package has a linkable (library) target, returns the unit
+    /// that builds that library with the package's lib profile; otherwise
+    /// returns `None`.
+    fn maybe_lib(&self, unit: &Unit<'a>) -> Option<Unit<'a>> {
+        unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| {
+            Unit {
+                pkg: unit.pkg,
+                target: t,
+                profile: self.lib_profile(unit.pkg.package_id()),
+                kind: unit.kind.for_target(t),
+            }
+        })
+    }
+
fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool {
// If this dependency is only available for certain platforms,
// make sure we're only enabling it for that platform.
&self.profiles.dev
}
}
-
-impl Platform {
- pub fn combine(self, other: Platform) -> Platform {
- match (self, other) {
- (Platform::Target, Platform::Target) => Platform::Target,
- (Platform::Plugin, Platform::Plugin) => Platform::Plugin,
- _ => Platform::PluginAndTarget,
- }
- }
-
- pub fn includes(self, kind: Kind) -> bool {
- match (self, kind) {
- (Platform::PluginAndTarget, _) |
- (Platform::Target, Kind::Target) |
- (Platform::Plugin, Kind::Host) => true,
- _ => false,
- }
- }
-}
use std::io::prelude::*;
use std::path::PathBuf;
use std::str;
-use std::sync::Mutex;
+use std::sync::{Mutex, Arc};
-use core::{Package, Target, PackageId, PackageSet, Profile};
+use core::{PackageId, PackageSet};
use util::{CargoResult, human, Human};
use util::{internal, ChainError, profile};
use util::Freshness;
use super::job::Work;
-use super::{fingerprint, process, Kind, Context, Platform};
+use super::{fingerprint, process, Kind, Context, Unit};
use super::CommandType;
-use super::PackagesToBuild;
/// Contains the parsed output of a custom build script.
#[derive(Clone, Debug)]
pub outputs: Mutex<BuildMap>,
}
+// Per-unit record of which build scripts a compilation depends on; filled
+// in by `build_map` below and stored (Arc-wrapped) in
+// `Context::build_scripts`.
+#[derive(Default)]
+pub struct BuildScripts {
+    // Build scripts whose output this unit consumes, tagged with the
+    // platform `Kind` they were run for.
+    pub to_link: Vec<(PackageId, Kind)>,
+    // Build scripts reached through host (plugin) dependencies.
+    // NOTE(review): semantics inferred from the field name — confirm against
+    // the consumers of this struct.
+    pub plugins: Vec<PackageId>,
+}
+
/// Prepares a `Work` that executes the target as a custom build script.
///
-/// The `req` given is the requirement which this run of the build script will
-/// prepare work for. If the requirement is specified as both the target and the
-/// host platforms it is assumed that the two are equal and the build script is
-/// only run once (not twice).
+/// The given `unit` names the build-script target to execute along with the
+/// platform `kind` that this run of the build script will prepare work for.
-pub fn prepare(pkg: &Package, target: &Target, req: Platform,
- cx: &mut Context) -> CargoResult<(Work, Work, Freshness)> {
+pub fn prepare(cx: &mut Context, unit: &Unit)
+ -> CargoResult<(Work, Work, Freshness)> {
let _p = profile::start(format!("build script prepare: {}/{}",
- pkg, target.name()));
- let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target, };
+ unit.pkg, unit.target.name()));
let (script_output, build_output) = {
- (cx.layout(pkg, Kind::Host).build(pkg),
- cx.layout(pkg, kind).build_out(pkg))
+ (cx.layout(unit.pkg, Kind::Host).build(unit.pkg),
+ cx.layout(unit.pkg, unit.kind).build_out(unit.pkg))
};
// Building the command to execute
- let to_exec = script_output.join(target.name());
+ let to_exec = script_output.join(unit.target.name());
// Start preparing the process to execute, starting out with some
// environment variables. Note that the profile-related environment
// variables are not set with this the build script's profile but rather the
// package's library profile.
- let profile = cx.lib_profile(pkg.package_id());
+ let profile = cx.lib_profile(unit.pkg.package_id());
let to_exec = to_exec.into_os_string();
- let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx));
+ let mut p = try!(super::process(CommandType::Host(to_exec), unit.pkg, cx));
p.env("OUT_DIR", &build_output)
- .env("CARGO_MANIFEST_DIR", pkg.root())
+ .env("CARGO_MANIFEST_DIR", unit.pkg.root())
.env("NUM_JOBS", &cx.jobs().to_string())
- .env("TARGET", &match kind {
+ .env("TARGET", &match unit.kind {
Kind::Host => &cx.config.rustc_info().host[..],
Kind::Target => cx.target_triple(),
})
// Be sure to pass along all enabled features for this package, this is the
// last piece of statically known information that we have.
- match cx.resolve.features(pkg.package_id()) {
- Some(features) => {
- for feat in features.iter() {
- p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
- }
+ if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
+ for feat in features.iter() {
+ p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1");
}
- None => {}
}
// Gather the set of native dependencies that this package has along with
// This information will be used at build-time later on to figure out which
// sorts of variables need to be discovered at that time.
let lib_deps = {
- let not_custom = pkg.targets().iter().find(|t| {
- !t.is_custom_build()
- }).unwrap();
- cx.dep_targets(pkg, not_custom, kind, profile).iter()
- .filter_map(|&(pkg, t, _)| {
- if !t.linkable() { return None }
- pkg.manifest().links().map(|links| {
- (links.to_string(), pkg.package_id().clone())
- })
+ cx.dep_run_custom_build(unit, true).iter().filter_map(|unit| {
+ if unit.profile.run_custom_build {
+ Some((unit.pkg.manifest().links().unwrap().to_string(),
+ unit.pkg.package_id().clone()))
+ } else {
+ None
+ }
}).collect::<Vec<_>>()
};
- let pkg_name = pkg.to_string();
+ let pkg_name = unit.pkg.to_string();
let build_state = cx.build_state.clone();
- let id = pkg.package_id().clone();
+ let id = unit.pkg.package_id().clone();
let all = (id.clone(), pkg_name.clone(), build_state.clone(),
build_output.clone());
- let plugin_deps = super::load_build_deps(cx, pkg, target, profile,
- Kind::Host);
+ let build_scripts = super::load_build_deps(cx, unit);
+ let kind = unit.kind;
- try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg)));
- try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg)));
+ try!(fs::create_dir_all(&cx.layout(unit.pkg, Kind::Host).build(unit.pkg)));
+ try!(fs::create_dir_all(&cx.layout(unit.pkg, unit.kind).build(unit.pkg)));
let exec_engine = cx.exec_engine.clone();
// native dynamic libraries.
{
let build_state = build_state.outputs.lock().unwrap();
- for &(ref name, ref id) in lib_deps.iter() {
- let data = &build_state[&(id.clone(), kind)].metadata;
+ for (name, id) in lib_deps {
+ let key = (id.clone(), kind);
+ let state = try!(build_state.get(&key).chain_error(|| {
+ internal(format!("failed to locate build state for env \
+ vars: {}/{:?}", id, kind))
+ }));
+ let data = &state.metadata;
for &(ref key, ref value) in data.iter() {
- p.env(&format!("DEP_{}_{}", super::envify(name),
+ p.env(&format!("DEP_{}_{}", super::envify(&name),
super::envify(key)), value);
}
}
- try!(super::add_plugin_deps(&mut p, &build_state, plugin_deps));
+ if let Some(build_scripts) = build_scripts {
+ try!(super::add_plugin_deps(&mut p, &build_state,
+ &build_scripts));
+ }
}
// And now finally, run the build command itself!
human("build script output was not valid utf-8")
}));
let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
- build_state.insert(id, req, parsed_output);
+ build_state.insert(id, kind, parsed_output);
try!(File::create(&build_output.parent().unwrap().join("output"))
.and_then(|mut f| f.write_all(output.as_bytes()))
//
// Also note that a fresh build command needs to
let (freshness, dirty, fresh) =
- try!(fingerprint::prepare_build_cmd(cx, pkg, kind));
- let dirty = Work::new(move |tx| {
- try!(work.call((tx.clone())));
- dirty.call(tx)
- });
- let fresh = Work::new(move |tx| {
+ try!(fingerprint::prepare_build_cmd(cx, unit));
+ let dirty = work.then(dirty);
+ let fresh = Work::new(move |_tx| {
let (id, pkg_name, build_state, build_output) = all;
let new_loc = build_output.parent().unwrap().join("output");
let mut f = try!(File::open(&new_loc).map_err(|e| {
let mut contents = String::new();
try!(f.read_to_string(&mut contents));
let output = try!(BuildOutput::parse(&contents, &pkg_name));
- build_state.insert(id, req, output);
-
- fresh.call(tx)
- });
+ build_state.insert(id, kind, output);
+ Ok(())
+ }).then(fresh);
Ok((dirty, fresh, freshness))
}
BuildState { outputs: Mutex::new(outputs) }
}
- fn insert(&self, id: PackageId, req: Platform,
- output: BuildOutput) {
- let mut outputs = self.outputs.lock().unwrap();
- match req {
- Platform::Target => { outputs.insert((id, Kind::Target), output); }
- Platform::Plugin => { outputs.insert((id, Kind::Host), output); }
-
- // If this build output was for both the host and target platforms,
- // we need to insert it at both places.
- Platform::PluginAndTarget => {
- outputs.insert((id.clone(), Kind::Host), output.clone());
- outputs.insert((id, Kind::Target), output);
- }
- }
+    /// Records the parsed output of a build script run for package `id` on
+    /// platform `kind`, overwriting any previous entry for that pair.
+    fn insert(&self, id: PackageId, kind: Kind, output: BuildOutput) {
+        self.outputs.lock().unwrap().insert((id, kind), output);
    }
}
/// The given set of targets to this function is the initial set of
/// targets/profiles which are being built.
pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>,
- pkgs: &'b PackagesToBuild<'b>) {
+ units: &[Unit<'b>]) {
let mut ret = HashMap::new();
- for &(pkg, ref targets) in pkgs {
- for &(target, profile) in targets {
- build(&mut ret, Kind::Target, pkg, target, profile, cx);
- build(&mut ret, Kind::Host, pkg, target, profile, cx);
- }
+ for unit in units {
+ build(&mut ret, cx, unit);
}
// Make the output a little more deterministic by sorting all dependencies
- for (&(id, target, _, kind), slot) in ret.iter_mut() {
- slot.sort();
- slot.dedup();
- debug!("script deps: {}/{}/{:?} => {:?}", id, target.name(), kind,
- slot.iter().map(|s| s.to_string()).collect::<Vec<_>>());
+ for (_, slot) in ret.iter_mut() {
+ slot.to_link.sort_by(|a, b| a.0.cmp(&b.0));
+ slot.to_link.dedup();
+ slot.plugins.sort();
+ slot.plugins.dedup();
}
- cx.build_scripts = ret;
+ cx.build_scripts.extend(ret.into_iter().map(|(k, v)| {
+ (k, Arc::new(v))
+ }));
// Recursive function to build up the map we're constructing. This function
// memoizes all of its return values as it goes along.
- fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<(&'b PackageId, &'b Target,
- &'b Profile, Kind),
- Vec<&'b PackageId>>,
- kind: Kind,
- pkg: &'b Package,
- target: &'b Target,
- profile: &'b Profile,
- cx: &Context<'b, 'cfg>)
- -> &'a [&'b PackageId] {
- // If this target has crossed into "host-land" we need to change the
- // kind that we're compiling for, and otherwise just do a quick
- // pre-flight check to see if we've already calculated the set of
- // dependencies.
- let kind = kind.for_target(target);
- let id = pkg.package_id();
- if out.contains_key(&(id, target, profile, kind)) {
- return &out[&(id, target, profile, kind)]
+ fn build<'a, 'b, 'cfg>(out: &'a mut HashMap<Unit<'b>, BuildScripts>,
+ cx: &Context<'b, 'cfg>,
+ unit: &Unit<'b>)
+ -> &'a BuildScripts {
+ // Do a quick pre-flight check to see if we've already calculated the
+ // set of dependencies.
+ if out.contains_key(unit) {
+ return &out[unit]
}
- // This loop is both the recursive and additive portion of this
- // function, the key part of the logic being around determining the
- // right `kind` to recurse on. If a dependency fits in the kind that
- // we've got specified, then we just keep plazing a trail, but otherwise
- // we *switch* the kind we're looking at because it must fit into the
- // other category.
- //
- // We always recurse, but only add to our own array if the target is
- // linkable to us (e.g. not a binary) and it's for the same original
- // `kind`.
- let mut ret = Vec::new();
- for (pkg, target, p) in cx.dep_targets(pkg, target, kind, profile) {
- let req = cx.get_requirement(pkg, target);
-
- let dep_kind = if req.includes(kind) {
- kind
- } else if kind == Kind::Target {
- Kind::Host
- } else {
- Kind::Target
- };
- let dep_scripts = build(out, dep_kind, pkg, target, p, cx);
+ let mut to_link = Vec::new();
+ let mut plugins = Vec::new();
- if target.linkable() && kind == dep_kind {
- if pkg.has_custom_build() {
- ret.push(pkg.package_id());
- }
- ret.extend(dep_scripts.iter().cloned());
+ if !unit.target.is_custom_build() && unit.pkg.has_custom_build() {
+ to_link.push((unit.pkg.package_id().clone(), unit.kind));
+ }
+ for unit in cx.dep_targets(unit).iter() {
+ let dep_scripts = build(out, cx, unit);
+
+ if unit.target.for_host() {
+ plugins.extend(dep_scripts.to_link.iter()
+ .map(|p| &p.0).cloned());
+ } else if unit.target.linkable() {
+ to_link.extend(dep_scripts.to_link.iter().cloned());
}
}
- let prev = out.entry((id, target, profile, kind)).or_insert(Vec::new());
- prev.extend(ret);
+ let prev = out.entry(*unit).or_insert(BuildScripts::default());
+ prev.to_link.extend(to_link);
+ prev.plugins.extend(plugins);
return prev
}
}
use filetime::FileTime;
-use core::{Package, Target, Profile};
+use core::{Package, TargetKind};
use util;
use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError};
-use super::Kind;
use super::job::Work;
-use super::context::Context;
+use super::context::{Context, Unit};
/// A tuple result of the `prepare_foo` functions in this module.
///
/// work necessary to either write the fingerprint or copy over all fresh files
/// from the old directories to their new locations.
pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- pkg: &'a Package,
- target: &'a Target,
- profile: &'a Profile,
- kind: Kind) -> CargoResult<Preparation> {
+ unit: &Unit<'a>) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint: {} / {}",
- pkg.package_id(), target.name()));
- let new = dir(cx, pkg, kind);
- let loc = new.join(&filename(target, profile));
+ unit.pkg.package_id(), unit.target.name()));
+ let new = dir(cx, unit);
+ let loc = new.join(&filename(unit));
- info!("fingerprint at: {}", loc.display());
+ debug!("fingerprint at: {}", loc.display());
- let mut fingerprint = try!(calculate(cx, pkg, target, profile, kind));
+ let mut fingerprint = try!(calculate(cx, unit));
let is_fresh = try!(is_fresh(&loc, &mut fingerprint));
- let root = cx.out_dir(pkg, kind, target);
+
+ let root = cx.out_dir(unit);
let mut missing_outputs = false;
- if !profile.doc {
- for filename in try!(cx.target_filenames(pkg, target, profile,
- kind)).iter() {
+ if !unit.profile.doc {
+ for filename in try!(cx.target_filenames(unit)).iter() {
missing_outputs |= fs::metadata(root.join(filename)).is_err();
}
}
- let allow_failure = profile.rustc_args.is_some();
- Ok(prepare(is_fresh && !missing_outputs, allow_failure, loc, fingerprint))
+ let allow_failure = unit.profile.rustc_args.is_some();
+ Ok(prepare(is_fresh && !missing_outputs,
+ allow_failure, loc, fingerprint))
}
/// A fingerprint can be considered to be a "short string" representing the
///
/// Information like file modification time is only calculated for path
/// dependencies and is calculated in `calculate_target_fresh`.
-fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- pkg: &'a Package,
- target: &'a Target,
- profile: &'a Profile,
- kind: Kind)
+fn calculate<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>)
-> CargoResult<Fingerprint> {
- let key = (pkg.package_id(), target, profile, kind);
- match cx.fingerprints.get(&key) {
- Some(s) => return Ok(s.clone()),
- None => {}
+ if let Some(s) = cx.fingerprints.get(unit) {
+ return Ok(s.clone())
}
// First, calculate all statically known "salt data" such as the profile
// information (compiler flags), the compiler version, activated features,
// and target configuration.
- let features = cx.resolve.features(pkg.package_id());
+ let features = cx.resolve.features(unit.pkg.package_id());
let features = features.map(|s| {
- let mut v = s.iter().collect::<Vec<&String>>();
+ let mut v = s.iter().collect::<Vec<_>>();
v.sort();
v
});
let extra = util::short_hash(&(&cx.config.rustc_info().verbose_version,
- target, &features, profile));
- debug!("extra {:?} {:?} {:?} = {}", target, profile, features, extra);
+ unit.target, &features, unit.profile));
// Next, recursively calculate the fingerprint for all of our dependencies.
//
// elsewhere. Also skip fingerprints of binaries because they don't actually
// induce a recompile, they're just dependencies in the sense that they need
// to be built.
- let deps = try!(cx.dep_targets(pkg, target, kind, profile).into_iter()
- .filter(|&(_, t, _)| !t.is_custom_build() && !t.is_bin())
- .map(|(pkg, target, profile)| {
- let kind = match kind {
- Kind::Host => Kind::Host,
- Kind::Target if target.for_host() => Kind::Host,
- Kind::Target => Kind::Target,
- };
- calculate(cx, pkg, target, profile, kind)
+ let deps = try!(cx.dep_targets(unit).iter().filter(|u| {
+ !u.target.is_custom_build() && !u.target.is_bin()
+ }).map(|unit| {
+ calculate(cx, unit).map(|fingerprint| {
+ fingerprint
+ })
}).collect::<CargoResult<Vec<_>>>());
// And finally, calculate what our own local fingerprint is
- let local = if use_dep_info(pkg, profile) {
- let dep_info = dep_info_loc(cx, pkg, target, profile, kind);
+ let local = if use_dep_info(unit) {
+ let dep_info = dep_info_loc(cx, unit);
let mtime = try!(calculate_target_mtime(&dep_info));
// if the mtime listed is not fresh, then remove the `dep_info` file to
}
LocalFingerprint::MtimeBased(mtime, dep_info)
} else {
- LocalFingerprint::Precalculated(try!(calculate_pkg_fingerprint(cx, pkg)))
+ LocalFingerprint::Precalculated(try!(calculate_pkg_fingerprint(cx,
+ unit.pkg)))
};
let fingerprint = Arc::new(FingerprintInner {
extra: extra,
local: local,
resolved: Mutex::new(None),
});
- cx.fingerprints.insert(key, fingerprint.clone());
+ cx.fingerprints.insert(*unit, fingerprint.clone());
Ok(fingerprint)
}
// git/registry source, then the mtime of files may fluctuate, but they won't
// change so long as the source itself remains constant (which is the
// responsibility of the source)
-fn use_dep_info(pkg: &Package, profile: &Profile) -> bool {
- let path = pkg.summary().source_id().is_path();
- !profile.doc && path
+fn use_dep_info(unit: &Unit) -> bool {
+ let path = unit.pkg.summary().source_id().is_path();
+ !unit.profile.doc && path
}
/// Prepare the necessary work for the fingerprint of a build command.
///
/// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future.
-pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, kind: Kind)
+pub fn prepare_build_cmd(cx: &mut Context, unit: &Unit)
-> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}",
- pkg.package_id()));
- let new = dir(cx, pkg, kind);
+ unit.pkg.package_id()));
+ let new = dir(cx, unit);
let loc = new.join("build");
- info!("fingerprint at: {}", loc.display());
+ debug!("fingerprint at: {}", loc.display());
- let new_fingerprint = try!(calculate_build_cmd_fingerprint(cx, pkg));
+ let new_fingerprint = try!(calculate_pkg_fingerprint(cx, unit.pkg));
let new_fingerprint = Arc::new(FingerprintInner {
extra: String::new(),
deps: Vec::new(),
}
/// Prepare work for when a package starts to build
-pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind)
- -> (Work, Work) {
- let new1 = dir(cx, pkg, kind);
+pub fn prepare_init(cx: &mut Context, unit: &Unit) -> CargoResult<()> {
+ let new1 = dir(cx, unit);
let new2 = new1.clone();
- let work1 = Work::new(move |_| {
- if fs::metadata(&new1).is_err() {
- try!(fs::create_dir(&new1));
- }
- Ok(())
- });
- let work2 = Work::new(move |_| {
- if fs::metadata(&new2).is_err() {
- try!(fs::create_dir(&new2));
- }
- Ok(())
- });
-
- (work1, work2)
+ if fs::metadata(&new1).is_err() {
+ try!(fs::create_dir(&new1));
+ }
+ if fs::metadata(&new2).is_err() {
+ try!(fs::create_dir(&new2));
+ }
+ Ok(())
}
/// Given the data to build and write a fingerprint, generate some Work
}
/// Return the (old, new) location for fingerprints for a package
-pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> PathBuf {
- cx.layout(pkg, kind).proxy().fingerprint(pkg)
+pub fn dir(cx: &Context, unit: &Unit) -> PathBuf {
+ cx.layout(unit.pkg, unit.kind).proxy().fingerprint(unit.pkg)
}
/// Returns the (old, new) location for the dep info file of a target.
-pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target,
- profile: &Profile, kind: Kind) -> PathBuf {
- dir(cx, pkg, kind).join(&format!("dep-{}", filename(target, profile)))
+pub fn dep_info_loc(cx: &Context, unit: &Unit) -> PathBuf {
+ dir(cx, unit).join(&format!("dep-{}", filename(unit)))
}
fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult<bool> {
Ok(Some(mtime))
}
-fn calculate_build_cmd_fingerprint(cx: &Context, pkg: &Package)
- -> CargoResult<String> {
- // TODO: this should be scoped to just the `build` directory, not the entire
- // package.
- calculate_pkg_fingerprint(cx, pkg)
-}
-
-fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> {
+fn calculate_pkg_fingerprint(cx: &Context,
+ pkg: &Package) -> CargoResult<String> {
let source = cx.sources
.get(pkg.package_id().source_id())
.expect("BUG: Missing package source");
source.fingerprint(pkg)
}
-fn filename(target: &Target, profile: &Profile) -> String {
- let kind = if target.is_lib() {"lib"} else {"bin"};
- let flavor = if target.is_test() || profile.test {
+fn filename(unit: &Unit) -> String {
+ let kind = match *unit.target.kind() {
+ TargetKind::Lib(..) => "lib",
+ TargetKind::Bin => "bin",
+ TargetKind::Test => "integration-test",
+ TargetKind::Example => "example",
+ TargetKind::Bench => "bench",
+ TargetKind::CustomBuild => "build-script",
+ };
+ let flavor = if unit.profile.test {
"test-"
- } else if profile.doc {
+ } else if unit.profile.doc {
"doc-"
} else {
""
};
- format!("{}{}-{}", flavor, kind, target.name())
+ format!("{}{}-{}", flavor, kind, unit.target.name())
}
// The dep-info files emitted by the compiler all have their listed paths
use std::sync::mpsc::Sender;
+use std::fmt;
use util::{CargoResult, Fresh, Dirty, Freshness};
pub fn call(self, tx: Sender<String>) -> CargoResult<()> {
self.inner.call_box(tx)
}
+
+ pub fn then(self, next: Work) -> Work {
+ Work::new(move |tx| {
+ try!(self.call(tx.clone()));
+ next.call(tx)
+ })
+ }
}
impl Job {
}
}
}
+
+impl fmt::Debug for Job {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "Job {{ ... }}")
+ }
+}
use std::collections::HashSet;
use std::collections::hash_map::HashMap;
+use std::fmt;
use std::sync::mpsc::{channel, Sender, Receiver};
-use threadpool::ThreadPool;
+use crossbeam::{self, Scope};
use term::color::YELLOW;
-use core::{Package, PackageId, Resolve, PackageSet};
+use core::{PackageId, Target, Profile};
use util::{Config, DependencyQueue, Fresh, Dirty, Freshness};
-use util::{CargoResult, Dependency, profile};
+use util::{CargoResult, Dependency, profile, internal};
+use super::{Context, Kind, Unit};
use super::job::Job;
/// A management structure of the entire dependency graph to compile.
/// actual compilation step of each package. Packages enqueue units of work and
/// then later on the entire graph is processed and compiled.
pub struct JobQueue<'a> {
- pool: ThreadPool,
- queue: DependencyQueue<(&'a PackageId, Stage),
- (&'a Package, Vec<(Job, Freshness)>)>,
- tx: Sender<Message>,
- rx: Receiver<Message>,
- resolve: &'a Resolve,
- packages: &'a PackageSet,
- active: u32,
- pending: HashMap<(&'a PackageId, Stage), PendingBuild>,
- pkgids: HashSet<&'a PackageId>,
+ jobs: usize,
+ queue: DependencyQueue<Key<'a>, Vec<(Job, Freshness)>>,
+ tx: Sender<Message<'a>>,
+ rx: Receiver<Message<'a>>,
+ active: usize,
+ pending: HashMap<Key<'a>, PendingBuild>,
printed: HashSet<&'a PackageId>,
+ counts: HashMap<&'a PackageId, usize>,
}
/// A helper structure for metadata about the state of a building package.
struct PendingBuild {
/// Number of jobs currently active
- amt: u32,
+ amt: usize,
/// Current freshness state of this package. Any dirty target within a
/// package will cause the entire package to become dirty.
fresh: Freshness,
}
-/// Current stage of compilation for an individual package.
-///
-/// This is the second layer of keys on the dependency queue to track the state
-/// of where a particular package is in the compilation pipeline. Each of these
-/// stages has a network of dependencies among them, outlined by the
-/// `Dependency` implementation found below.
-///
-/// Each build step for a package is registered with one of these stages, and
-/// each stage has a vector of work to perform in parallel.
-#[derive(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Debug, Copy)]
-pub enum Stage {
- Start,
- BuildCustomBuild,
- RunCustomBuild,
- Libraries,
- Binaries,
- LibraryTests,
- BinaryTests,
- End,
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+struct Key<'a> {
+ pkg: &'a PackageId,
+ target: &'a Target,
+ profile: &'a Profile,
+ kind: Kind,
}
-type Message = (PackageId, Stage, Freshness, CargoResult<()>);
+struct Message<'a> {
+ key: Key<'a>,
+ result: CargoResult<()>,
+}
impl<'a> JobQueue<'a> {
- pub fn new(resolve: &'a Resolve, packages: &'a PackageSet, jobs: u32)
- -> JobQueue<'a> {
+ pub fn new<'cfg>(cx: &Context<'a, 'cfg>) -> JobQueue<'a> {
let (tx, rx) = channel();
JobQueue {
- pool: ThreadPool::new(jobs as usize),
+ jobs: cx.jobs() as usize,
queue: DependencyQueue::new(),
tx: tx,
rx: rx,
- resolve: resolve,
- packages: packages,
active: 0,
pending: HashMap::new(),
- pkgids: HashSet::new(),
printed: HashSet::new(),
+ counts: HashMap::new(),
}
}
- pub fn queue(&mut self, pkg: &'a Package, stage: Stage)
- -> &mut Vec<(Job, Freshness)> {
- self.pkgids.insert(pkg.package_id());
- &mut self.queue.queue(&(self.resolve, self.packages), Fresh,
- (pkg.package_id(), stage),
- (pkg, Vec::new())).1
+ pub fn enqueue(&mut self, cx: &Context<'a, 'a>,
+ unit: &Unit<'a>, job: Job, fresh: Freshness) {
+ let key = Key::new(unit);
+ self.queue.queue(cx, Fresh, key, Vec::new()).push((job, fresh));
+ *self.counts.entry(key.pkg).or_insert(0) += 1;
}
/// Execute all jobs necessary to build the dependency graph.
pub fn execute(&mut self, config: &Config) -> CargoResult<()> {
let _p = profile::start("executing the job graph");
- // Iteratively execute the dependency graph. Each turn of this loop will
- // schedule as much work as possible and then wait for one job to finish,
- // possibly scheduling more work afterwards.
- while self.queue.len() > 0 {
- loop {
- match self.queue.dequeue() {
- Some((fresh, (_, stage), (pkg, jobs))) => {
- info!("start: {} {:?}", pkg, stage);
- try!(self.run(pkg, stage, fresh, jobs, config));
- }
- None => break,
+ crossbeam::scope(|scope| {
+ self.drain_the_queue(config, scope)
+ })
+ }
+
+ fn drain_the_queue(&mut self, config: &Config, scope: &Scope<'a>)
+ -> CargoResult<()> {
+ let mut queue = Vec::new();
+ trace!("queue: {:#?}", self.queue);
+
+ // Iteratively execute the entire dependency graph. Each turn of the
+ // loop starts out by scheduling as much work as possible (up to the
+ // maximum number of parallel jobs). A local queue is maintained
+ // separately from the main dependency queue as one dequeue may actually
+ // dequeue quite a bit of work (e.g. 10 binaries in one project).
+ //
+ // After a job has finished we update our internal state if it was
+ // successful and otherwise wait for pending work to finish if it failed
+ // and then immediately return.
+ loop {
+ while self.active < self.jobs {
+ if queue.len() > 0 {
+ let (key, job, fresh) = queue.remove(0);
+ try!(self.run(key, fresh, job, config, scope));
+ } else if let Some((fresh, key, jobs)) = self.queue.dequeue() {
+ let total_fresh = jobs.iter().fold(fresh, |fresh, &(_, f)| {
+ f.combine(fresh)
+ });
+ self.pending.insert(key, PendingBuild {
+ amt: jobs.len(),
+ fresh: total_fresh,
+ });
+ queue.extend(jobs.into_iter().map(|(job, f)| {
+ (key, job, f.combine(fresh))
+ }));
+ } else {
+ break
}
}
+ if self.active == 0 {
+ break
+ }
// Now that all possible work has been scheduled, wait for a piece
// of work to finish. If any package fails to build then we stop
// scheduling work as quickly as possibly.
- let (id, stage, fresh, result) = self.rx.recv().unwrap();
- info!(" end: {} {:?}", id, stage);
- let id = *self.pkgids.iter().find(|&k| *k == &id).unwrap();
+ let msg = self.rx.recv().unwrap();
+ info!("end: {:?}", msg.key);
self.active -= 1;
- match result {
+ match msg.result {
Ok(()) => {
- let state = self.pending.get_mut(&(id, stage)).unwrap();
+ let state = self.pending.get_mut(&msg.key).unwrap();
state.amt -= 1;
- state.fresh = state.fresh.combine(fresh);
if state.amt == 0 {
- self.queue.finish(&(id, stage), state.fresh);
+ self.queue.finish(&msg.key, state.fresh);
}
}
Err(e) => {
}
}
- trace!("rustc jobs completed");
-
- Ok(())
+ if self.queue.len() == 0 {
+ Ok(())
+ } else {
+ debug!("queue: {:#?}", self.queue);
+ Err(internal("finished with jobs still left in the queue"))
+ }
}
- /// Execute a stage of compilation for a package.
- ///
- /// The input freshness is from `dequeue()` and indicates the combined
- /// freshness of all upstream dependencies. This function will schedule all
- /// work in `jobs` to be executed.
- fn run(&mut self, pkg: &'a Package, stage: Stage, fresh: Freshness,
- jobs: Vec<(Job, Freshness)>, config: &Config) -> CargoResult<()> {
- let njobs = jobs.len();
- let amt = if njobs == 0 {1} else {njobs as u32};
- let id = pkg.package_id().clone();
-
- // While the jobs are all running, we maintain some metadata about how
- // many are running, the current state of freshness (of all the combined
- // jobs), and the stage to pass to finish() later on.
- self.active += amt;
- self.pending.insert((pkg.package_id(), stage), PendingBuild {
- amt: amt,
- fresh: fresh,
- });
+    /// Executes a job in the `scope` given, spawning a thread that runs the
+    /// job and sends its result back over this queue's message channel.
+ fn run(&mut self,
+ key: Key<'a>,
+ fresh: Freshness,
+ job: Job,
+ config: &Config,
+ scope: &Scope<'a>) -> CargoResult<()> {
+ info!("start: {:?}", key);
- let mut total_fresh = fresh;
- let mut running = Vec::new();
- debug!("start {:?} at {:?} for {}", total_fresh, stage, pkg);
- for (job, job_freshness) in jobs.into_iter() {
- debug!("job: {:?} ({:?})", job_freshness, total_fresh);
- let fresh = job_freshness.combine(fresh);
- total_fresh = total_fresh.combine(fresh);
- let my_tx = self.tx.clone();
- let id = id.clone();
- let (desc_tx, desc_rx) = channel();
- self.pool.execute(move|| {
- my_tx.send((id, stage, fresh, job.run(fresh, desc_tx))).unwrap();
- });
- // only the first message of each job is processed
- match desc_rx.recv() {
- Ok(msg) => running.push(msg),
- Err(..) => {}
- }
- }
+ self.active += 1;
+ *self.counts.get_mut(key.pkg).unwrap() -= 1;
- // If no work was scheduled, make sure that a message is actually send
- // on this channel.
- if njobs == 0 {
- self.tx.send((id, stage, fresh, Ok(()))).unwrap();
- }
+ let my_tx = self.tx.clone();
+ let (desc_tx, desc_rx) = channel();
+ scope.spawn(move || {
+ my_tx.send(Message {
+ key: key,
+ result: job.run(fresh, desc_tx),
+ }).unwrap();
+ });
// Print out some nice progress information
- try!(self.note_working_on(config, pkg.package_id(), stage, total_fresh,
- running.len()));
- for msg in running.iter() {
- try!(config.shell().verbose(|c| c.status("Running", msg)));
+ try!(self.note_working_on(config, key.pkg, fresh));
+
+ // only the first message of each job is processed
+ if let Ok(msg) = desc_rx.recv() {
+ try!(config.shell().verbose(|c| c.status("Running", &msg)));
}
Ok(())
}
// run for a package, regardless of when that is. We then don't print
// out any more information for a package after we've printed it once.
fn note_working_on(&mut self, config: &Config, pkg: &'a PackageId,
- stage: Stage, fresh: Freshness, cmds_run: usize)
- -> CargoResult<()> {
- if self.printed.contains(&pkg) { return Ok(()) }
+ fresh: Freshness) -> CargoResult<()> {
+ if self.printed.contains(&pkg) {
+ return Ok(())
+ }
match fresh {
// Any dirty stage which runs at least one command gets printed as
// being a compiled package
- Dirty if cmds_run == 0 => {}
Dirty => {
self.printed.insert(pkg);
try!(config.shell().status("Compiling", pkg));
}
- Fresh if stage == Stage::End => {
+ Fresh if self.counts[pkg] == 0 => {
self.printed.insert(pkg);
try!(config.shell().verbose(|c| c.status("Fresh", pkg)));
}
}
}
-impl<'a> Dependency for (&'a PackageId, Stage) {
- type Context = (&'a Resolve, &'a PackageSet);
-
- fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
- -> Vec<(&'a PackageId, Stage)> {
- // This implementation of `Dependency` is the driver for the structure
- // of the dependency graph of packages to be built. The "key" here is
- // a pair of the package being built and the stage that it's at.
- //
- // Each stage here lists dependencies on the previous stages except for
- // the start state which depends on the ending state of all dependent
- // packages (as determined by the resolve context).
- let (id, stage) = *self;
- let pkg = packages.iter().find(|p| p.package_id() == id).unwrap();
- let deps = resolve.deps(id).into_iter().flat_map(|a| a)
- .filter(|dep| *dep != id);
- match stage {
- Stage::Start => Vec::new(),
-
- // Building the build command itself starts off pretty easily,we
- // just need to depend on all of the library stages of our own build
- // dependencies (making them available to us).
- Stage::BuildCustomBuild => {
- let mut base = vec![(id, Stage::Start)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_build()
- })
- }).map(|id| (id, Stage::Libraries)));
- base
- }
-
- // When running a custom build command, we need to be sure that our
- // own custom build command is actually built, and then we need to
- // wait for all our dependencies to finish their custom build
- // commands themselves (as they may provide input to us).
- Stage::RunCustomBuild => {
- let mut base = vec![(id, Stage::BuildCustomBuild)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_transitive()
- })
- }).map(|id| (id, Stage::RunCustomBuild)));
- base
- }
-
- // Building a library depends on our own custom build command plus
- // all our transitive dependencies.
- Stage::Libraries => {
- let mut base = vec![(id, Stage::RunCustomBuild)];
- base.extend(deps.filter(|id| {
- pkg.dependencies().iter().any(|d| {
- d.name() == id.name() && d.is_transitive()
- })
- }).map(|id| (id, Stage::Libraries)));
- base
- }
-
- // Binaries only depend on libraries being available. Note that they
- // do not depend on dev-dependencies.
- Stage::Binaries => vec![(id, Stage::Libraries)],
+impl<'a> Dependency for Key<'a> {
+ type Context = Context<'a, 'a>;
- // Tests depend on all dependencies (including dev-dependencies) in
- // addition to the library stage for this package. Note, however,
- // that library tests only need to depend the custom build command
- // being run, not the libraries themselves.
- Stage::BinaryTests | Stage::LibraryTests => {
- let mut base = if stage == Stage::BinaryTests {
- vec![(id, Stage::Libraries)]
- } else {
- vec![(id, Stage::RunCustomBuild)]
- };
- base.extend(deps.map(|id| (id, Stage::Libraries)));
- base
+ fn dependencies(&self, cx: &Context<'a, 'a>) -> Vec<Key<'a>> {
+ let unit = Unit {
+ pkg: cx.get_package(self.pkg),
+ target: self.target,
+ profile: self.profile,
+ kind: self.kind,
+ };
+ cx.dep_targets(&unit).iter().filter_map(|unit| {
+ // Binaries aren't actually needed to *compile* tests, just to run
+ // them, so we don't include this dependency edge in the job graph.
+ if self.target.is_test() && unit.target.is_bin() {
+ None
+ } else {
+ Some(Key::new(unit))
}
+ }).collect()
+ }
+}
- // A marker stage to indicate when a package has entirely finished
- // compiling, nothing is actually built as part of this stage.
- Stage::End => {
- vec![(id, Stage::Binaries), (id, Stage::BinaryTests),
- (id, Stage::LibraryTests)]
- }
+impl<'a> Key<'a> {
+ fn new(unit: &Unit<'a>) -> Key<'a> {
+ Key {
+ pkg: unit.pkg.package_id(),
+ target: unit.target,
+ profile: unit.profile,
+ kind: unit.kind,
}
}
}
+
+impl<'a> fmt::Debug for Key<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{} => {}/{} => {:?}", self.pkg, self.target, self.profile,
+ self.kind)
+ }
+}
-use std::collections::{HashSet, HashMap};
+use std::collections::HashMap;
use std::env;
use std::ffi::{OsStr, OsString};
use std::fs;
use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve};
use core::{Profile, Profiles};
use util::{self, CargoResult, human};
-use util::{Config, internal, ChainError, Fresh, profile, join_paths};
+use util::{Config, internal, ChainError, profile, join_paths};
use self::job::{Job, Work};
-use self::job_queue::{JobQueue, Stage};
+use self::job_queue::JobQueue;
pub use self::compilation::Compilation;
-pub use self::context::Context;
-pub use self::context::Platform;
+pub use self::context::{Context, Unit};
pub use self::engine::{CommandPrototype, CommandType, ExecEngine, ProcessEngine};
pub use self::layout::{Layout, LayoutProxy};
-pub use self::custom_build::{BuildOutput, BuildMap};
+pub use self::custom_build::{BuildOutput, BuildMap, BuildScripts};
mod context;
mod compilation;
build_config: BuildConfig,
profiles: &'a Profiles)
-> CargoResult<Compilation<'cfg>> {
-
+ let units = pkg_targets.iter().flat_map(|&(pkg, ref targets)| {
+ let default_kind = if build_config.requested_target.is_some() {
+ Kind::Target
+ } else {
+ Kind::Host
+ };
+ targets.iter().map(move |&(target, profile)| {
+ Unit {
+ pkg: pkg,
+ target: target,
+ profile: profile,
+ kind: if target.for_host() {Kind::Host} else {default_kind},
+ }
+ })
+ }).collect::<Vec<_>>();
try!(links::validate(deps));
let dest = if build_config.release {"release"} else {"debug"};
host_layout, target_layout,
build_config, profiles));
- let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs());
+ let mut queue = JobQueue::new(&cx);
- {
- let _p = profile::start("preparing build directories");
- // Prep the context's build requirements and see the job graph for all
- // packages initially.
-
-
- try!(cx.prepare(root, pkg_targets));
- let mut visited = HashSet::new();
- for &(pkg, _) in pkg_targets {
- prepare_init(&mut cx, pkg, &mut queue, &mut visited);
- }
- custom_build::build_map(&mut cx, pkg_targets);
- }
+ try!(cx.prepare(root));
+ custom_build::build_map(&mut cx, &units);
- for &(pkg, ref targets) in pkg_targets {
+ for unit in units.iter() {
// Build up a list of pending jobs, each of which represent
// compiling a particular package. No actual work is executed as
// part of this, that's all done next as part of the `execute`
// function which will run everything in order with proper
// parallelism.
- try!(compile(targets, pkg, &mut cx, &mut queue));
+ try!(compile(&mut cx, &mut queue, unit));
}
// Now that we've figured out everything that we're going to do, do it!
try!(queue.execute(cx.config));
- for &(pkg, ref targets) in pkg_targets.iter() {
- let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg)
+ for unit in units.iter() {
+ let out_dir = cx.layout(unit.pkg, unit.kind).build_out(unit.pkg)
.display().to_string();
- cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir);
-
- let mut tests = vec![];
-
- for &(target, profile) in targets {
- let kind = Kind::from(target);
- for filename in try!(cx.target_filenames(pkg, target, profile,
- kind)).iter() {
- let dst = cx.out_dir(pkg, kind, target).join(filename);
- if profile.test {
- tests.push((target.name().to_string(), dst));
- } else if target.is_bin() || target.is_example() {
- cx.compilation.binaries.push(dst);
- } else if target.is_lib() {
- let pkgid = pkg.package_id().clone();
- cx.compilation.libraries.entry(pkgid).or_insert(Vec::new())
- .push((target.clone(), dst));
+ cx.compilation.extra_env.entry(unit.pkg.package_id().clone())
+ .or_insert(Vec::new())
+ .push(("OUT_DIR".to_string(), out_dir));
+
+ for filename in try!(cx.target_filenames(unit)).iter() {
+ let dst = cx.out_dir(unit).join(filename);
+ if unit.profile.test {
+ cx.compilation.tests.push((unit.pkg.clone(),
+ unit.target.name().to_string(),
+ dst));
+ } else if unit.target.is_bin() || unit.target.is_example() {
+ cx.compilation.binaries.push(dst);
+ } else if unit.target.is_lib() {
+ let pkgid = unit.pkg.package_id().clone();
+ cx.compilation.libraries.entry(pkgid).or_insert(Vec::new())
+ .push((unit.target.clone(), dst));
+ }
+ if !unit.target.is_lib() { continue }
+
+ // Include immediate lib deps as well
+ for unit in cx.dep_targets(unit).iter() {
+ let pkgid = unit.pkg.package_id();
+ if !unit.target.is_lib() { continue }
+ if unit.profile.doc { continue }
+ if cx.compilation.libraries.contains_key(&pkgid) {
+ continue
}
- if !target.is_lib() { continue }
-
- // Include immediate lib deps as well
- for dep in &cx.dep_targets(pkg, target, kind, profile) {
- let (pkg, target, profile) = *dep;
- let pkgid = pkg.package_id();
- if !target.is_lib() { continue }
- if profile.doc { continue }
- if cx.compilation.libraries.contains_key(&pkgid) {
- continue
- }
- let kind = kind.for_target(target);
- let v = try!(cx.target_filenames(pkg, target, profile, kind));
- let v = v.into_iter().map(|f| {
- (target.clone(), cx.out_dir(pkg, kind, target).join(f))
- }).collect::<Vec<_>>();
- cx.compilation.libraries.insert(pkgid.clone(), v);
- }
+ let v = try!(cx.target_filenames(unit));
+ let v = v.into_iter().map(|f| {
+ (unit.target.clone(), cx.out_dir(unit).join(f))
+ }).collect::<Vec<_>>();
+ cx.compilation.libraries.insert(pkgid.clone(), v);
}
}
-
- cx.compilation.tests.push((pkg.clone(), tests));
-
}
if let Some(feats) = cx.resolve.features(root.package_id()) {
Ok(cx.compilation)
}
-fn compile<'a, 'cfg>(targets: &[(&'a Target, &'a Profile)],
- pkg: &'a Package,
- cx: &mut Context<'a, 'cfg>,
- jobs: &mut JobQueue<'a>) -> CargoResult<()> {
- debug!("compile_pkg; pkg={}", pkg);
-
- // For each target/profile run the compiler or rustdoc accordingly. After
- // having done so we enqueue the job in the right portion of the dependency
- // graph and then move on to the next.
- //
- // This loop also takes care of enqueueing the work needed to actually run
- // the custom build commands as well.
- for &(target, profile) in targets {
- if !cx.compiled.insert((pkg.package_id(), target, profile)) {
- continue
- }
+fn compile<'a, 'cfg: 'a>(cx: &mut Context<'a, 'cfg>,
+ jobs: &mut JobQueue<'a>,
+ unit: &Unit<'a>) -> CargoResult<()> {
+ if !cx.compiled.insert(*unit) {
+ return Ok(())
+ }
- let profiling_marker = profile::start(format!("preparing: {}/{}",
- pkg, target.name()));
- let work = if profile.doc {
- let rustdoc = try!(rustdoc(pkg, target, profile, cx));
- vec![(rustdoc, Kind::Target)]
+ // Build up the work to be done to compile this unit, enqueuing it once
+ // we've got everything constructed.
+ let p = profile::start(format!("preparing: {}/{}", unit.pkg,
+ unit.target.name()));
+ try!(fingerprint::prepare_init(cx, unit));
+
+ let (dirty, fresh, freshness) = if unit.profile.run_custom_build {
+ try!(custom_build::prepare(cx, unit))
+ } else {
+ let (freshness, dirty, fresh) = try!(fingerprint::prepare_target(cx,
+ unit));
+ let work = if unit.profile.doc {
+ try!(rustdoc(cx, unit))
} else {
- let req = cx.get_requirement(pkg, target);
- try!(rustc(pkg, target, profile, cx, req))
+ try!(rustc(cx, unit))
};
+ let dirty = work.then(dirty);
+ (dirty, fresh, freshness)
+ };
+ jobs.enqueue(cx, unit, Job::new(dirty, fresh), freshness);
+ drop(p);
- let kinds = work.iter().map(|&(_, kind)| kind).collect::<Vec<_>>();
-
- for (work, kind) in work {
- let (freshness, dirty, fresh) =
- try!(fingerprint::prepare_target(cx, pkg, target, profile, kind));
-
- let dirty = Work::new(move |desc_tx| {
- try!(work.call(desc_tx.clone()));
- dirty.call(desc_tx)
- });
-
- // Figure out what stage this work will go into
- let dst = match (target.is_lib(),
- profile.test,
- target.is_custom_build()) {
- (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild),
- (true, true, _) => jobs.queue(pkg, Stage::LibraryTests),
- (false, true, _) => jobs.queue(pkg, Stage::BinaryTests),
- (true, false, _) => jobs.queue(pkg, Stage::Libraries),
- (false, false, _) if !target.is_bin() => {
- jobs.queue(pkg, Stage::BinaryTests)
- }
- (false, false, _) => jobs.queue(pkg, Stage::Binaries),
- };
- dst.push((Job::new(dirty, fresh), freshness));
-
- }
- drop(profiling_marker);
-
- // Be sure to compile all dependencies of this target as well.
- for kind in kinds {
- for (pkg, target, p) in cx.dep_targets(pkg, target, kind, profile) {
- try!(compile(&[(target, p)], pkg, cx, jobs));
- }
- }
-
- // If this is a custom build command, we need to not only build the
- // script but we also need to run it. Note that this is a little nuanced
- // because we may need to run the build script multiple times. If the
- // package is needed in both a host and target context, we need to run
- // it once per context.
- if !target.is_custom_build() { continue }
- let mut reqs = Vec::new();
- let requirement = pkg.targets().iter().filter(|t| !t.is_custom_build())
- .fold(None::<Platform>, |req, t| {
- let r2 = cx.get_requirement(pkg, t);
- req.map(|r| r.combine(r2)).or(Some(r2))
- }).unwrap_or(Platform::Target);
- match requirement {
- Platform::Target => reqs.push(Platform::Target),
- Platform::Plugin => reqs.push(Platform::Plugin),
- Platform::PluginAndTarget => {
- if cx.requested_target().is_some() {
- reqs.push(Platform::Plugin);
- reqs.push(Platform::Target);
- } else {
- reqs.push(Platform::PluginAndTarget);
- }
- }
- }
- let before = jobs.queue(pkg, Stage::RunCustomBuild).len();
- for &req in reqs.iter() {
- let kind = match req {
- Platform::Plugin => Kind::Host,
- _ => Kind::Target,
- };
- let key = (pkg.package_id().clone(), kind);
- if pkg.manifest().links().is_some() &&
- cx.build_state.outputs.lock().unwrap().contains_key(&key) {
- continue
- }
- let (dirty, fresh, freshness) =
- try!(custom_build::prepare(pkg, target, req, cx));
- let run_custom = jobs.queue(pkg, Stage::RunCustomBuild);
- run_custom.push((Job::new(dirty, fresh), freshness));
- }
-
- // If we didn't actually run the custom build command, then there's no
- // need to compile it.
- if jobs.queue(pkg, Stage::RunCustomBuild).len() == before {
- jobs.queue(pkg, Stage::BuildCustomBuild).pop();
- }
+ // Be sure to compile all dependencies of this target as well.
+ for unit in cx.dep_targets(unit).iter() {
+ try!(compile(cx, jobs, unit));
}
-
Ok(())
}
-fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>,
- pkg: &'a Package,
- jobs: &mut JobQueue<'a>,
- visited: &mut HashSet<&'a PackageId>) {
- if !visited.insert(pkg.package_id()) { return }
-
- // Set up all dependencies
- for dep in cx.resolve.deps(pkg.package_id()).into_iter().flat_map(|a| a) {
- let dep = cx.get_package(dep);
- prepare_init(cx, dep, jobs, visited);
+fn rustc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
+ let crate_types = unit.target.rustc_crate_types();
+ let mut rustc = try!(prepare_rustc(cx, crate_types, unit));
+
+ let name = unit.pkg.name().to_string();
+ let is_path_source = unit.pkg.package_id().source_id().is_path();
+ let allow_warnings = unit.pkg.package_id() == cx.resolve.root() ||
+ is_path_source;
+ if !allow_warnings {
+ if cx.config.rustc_info().cap_lints {
+ rustc.arg("--cap-lints").arg("allow");
+ } else {
+ rustc.arg("-Awarnings");
+ }
}
+ let has_custom_args = unit.profile.rustc_args.is_some();
+ let exec_engine = cx.exec_engine.clone();
- // Initialize blank queues for each stage
- jobs.queue(pkg, Stage::BuildCustomBuild);
- jobs.queue(pkg, Stage::RunCustomBuild);
- jobs.queue(pkg, Stage::Libraries);
- jobs.queue(pkg, Stage::Binaries);
- jobs.queue(pkg, Stage::LibraryTests);
- jobs.queue(pkg, Stage::BinaryTests);
- jobs.queue(pkg, Stage::End);
-
- // Prepare the fingerprint directory as the first step of building a package
- let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target);
- let init = jobs.queue(pkg, Stage::Start);
- if cx.requested_target().is_some() {
- let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg,
- Kind::Host);
- init.push((Job::new(plugin1, plugin2), Fresh));
- }
- init.push((Job::new(target1, target2), Fresh));
-}
+ let filenames = try!(cx.target_filenames(unit));
+ let root = cx.out_dir(unit);
-fn rustc(package: &Package, target: &Target, profile: &Profile,
- cx: &mut Context, req: Platform)
- -> CargoResult<Vec<(Work, Kind)> >{
- let crate_types = target.rustc_crate_types();
- let rustcs = try!(prepare_rustc(package, target, profile, crate_types,
- cx, req));
-
- let plugin_deps = load_build_deps(cx, package, target, profile, Kind::Host);
-
- return rustcs.into_iter().map(|(mut rustc, kind)| {
- let name = package.name().to_string();
- let is_path_source = package.package_id().source_id().is_path();
- let allow_warnings = package.package_id() == cx.resolve.root() ||
- is_path_source;
- if !allow_warnings {
- if cx.config.rustc_info().cap_lints {
- rustc.arg("--cap-lints").arg("allow");
- } else {
- rustc.arg("-Awarnings");
- }
- }
- let has_custom_args = profile.rustc_args.is_some();
- let exec_engine = cx.exec_engine.clone();
-
- let filenames = try!(cx.target_filenames(package, target, profile,
- kind));
- let root = cx.out_dir(package, kind, target);
-
- // Prepare the native lib state (extra -L and -l flags)
- let build_state = cx.build_state.clone();
- let current_id = package.package_id().clone();
- let plugin_deps = plugin_deps.clone();
- let mut native_lib_deps = load_build_deps(cx, package, target, profile,
- kind);
- if package.has_custom_build() && !target.is_custom_build() {
- native_lib_deps.insert(0, current_id.clone());
- }
+ // Prepare the native lib state (extra -L and -l flags)
+ let build_state = cx.build_state.clone();
+ let current_id = unit.pkg.package_id().clone();
+ let build_deps = load_build_deps(cx, unit);
- // If we are a binary and the package also contains a library, then we
- // don't pass the `-l` flags.
- let pass_l_flag = target.is_lib() || !package.targets().iter().any(|t| {
- t.is_lib()
- });
- let do_rename = target.allows_underscores() && !profile.test;
- let real_name = target.name().to_string();
- let crate_name = target.crate_name();
+ // If we are a binary and the package also contains a library, then we
+ // don't pass the `-l` flags.
+ let pass_l_flag = unit.target.is_lib() ||
+ !unit.pkg.targets().iter().any(|t| t.is_lib());
+ let do_rename = unit.target.allows_underscores() && !unit.profile.test;
+ let real_name = unit.target.name().to_string();
+ let crate_name = unit.target.crate_name();
- let rustc_dep_info_loc = if do_rename {
- root.join(&crate_name)
- } else {
- root.join(&cx.file_stem(package, target, profile))
- }.with_extension("d");
- let dep_info_loc = fingerprint::dep_info_loc(cx, package, target,
- profile, kind);
- let cwd = cx.config.cwd().to_path_buf();
-
- Ok((Work::new(move |desc_tx| {
- debug!("about to run: {}", rustc);
-
- // Only at runtime have we discovered what the extra -L and -l
- // arguments are for native libraries, so we process those here. We
- // also need to be sure to add any -L paths for our plugins to the
- // dynamic library load path as a plugin's dynamic library may be
- // located somewhere in there.
+ let rustc_dep_info_loc = if do_rename {
+ root.join(&crate_name)
+ } else {
+ root.join(&cx.file_stem(unit))
+ }.with_extension("d");
+ let dep_info_loc = fingerprint::dep_info_loc(cx, unit);
+ let cwd = cx.config.cwd().to_path_buf();
+
+ return Ok(Work::new(move |desc_tx| {
+ debug!("about to run: {}", rustc);
+
+ // Only at runtime have we discovered what the extra -L and -l
+ // arguments are for native libraries, so we process those here. We
+ // also need to be sure to add any -L paths for our plugins to the
+ // dynamic library load path as a plugin's dynamic library may be
+ // located somewhere in there.
+ if let Some(build_deps) = build_deps {
let build_state = build_state.outputs.lock().unwrap();
- add_native_deps(&mut rustc, &build_state, native_lib_deps,
- kind, pass_l_flag, ¤t_id);
- try!(add_plugin_deps(&mut rustc, &build_state, plugin_deps));
- drop(build_state);
-
- // FIXME(rust-lang/rust#18913): we probably shouldn't have to do
- // this manually
- for filename in filenames.iter() {
- let dst = root.join(filename);
- if fs::metadata(&dst).is_ok() {
- try!(fs::remove_file(&dst));
- }
- }
-
- desc_tx.send(rustc.to_string()).ok();
- try!(exec_engine.exec(rustc).chain_error(|| {
- human(format!("Could not compile `{}`.", name))
- }));
+ try!(add_native_deps(&mut rustc, &build_state, &build_deps,
+ pass_l_flag, ¤t_id));
+ try!(add_plugin_deps(&mut rustc, &build_state, &build_deps));
+ }
- if do_rename && real_name != crate_name {
- let dst = root.join(&filenames[0]);
- let src = dst.with_file_name(dst.file_name().unwrap()
- .to_str().unwrap()
- .replace(&real_name, &crate_name));
- if !has_custom_args || fs::metadata(&src).is_ok() {
- try!(fs::rename(&src, &dst).chain_error(|| {
- internal(format!("could not rename crate {:?}", src))
- }));
- }
+ // FIXME(rust-lang/rust#18913): we probably shouldn't have to do
+ // this manually
+ for filename in filenames.iter() {
+ let dst = root.join(filename);
+ if fs::metadata(&dst).is_ok() {
+ try!(fs::remove_file(&dst));
}
+ }
- if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() {
- try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
- internal(format!("could not rename dep info: {:?}",
- rustc_dep_info_loc))
+ desc_tx.send(rustc.to_string()).ok();
+ try!(exec_engine.exec(rustc).chain_error(|| {
+ human(format!("Could not compile `{}`.", name))
+ }));
+
+ if do_rename && real_name != crate_name {
+ let dst = root.join(&filenames[0]);
+ let src = dst.with_file_name(dst.file_name().unwrap()
+ .to_str().unwrap()
+ .replace(&real_name, &crate_name));
+ if !has_custom_args || fs::metadata(&src).is_ok() {
+ try!(fs::rename(&src, &dst).chain_error(|| {
+ internal(format!("could not rename crate {:?}", src))
}));
- try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
}
+ }
- Ok(())
+ if !has_custom_args || fs::metadata(&rustc_dep_info_loc).is_ok() {
+ try!(fs::rename(&rustc_dep_info_loc, &dep_info_loc).chain_error(|| {
+ internal(format!("could not rename dep info: {:?}",
+ rustc_dep_info_loc))
+ }));
+ try!(fingerprint::append_current_dir(&dep_info_loc, &cwd));
+ }
- }), kind))
- }).collect();
+ Ok(())
+ }));
// Add all relevant -L and -l flags from dependencies (now calculated and
// present in `state`) to the command provided
fn add_native_deps(rustc: &mut CommandPrototype,
build_state: &BuildMap,
- native_lib_deps: Vec<PackageId>,
- kind: Kind,
+ build_scripts: &BuildScripts,
pass_l_flag: bool,
- current_id: &PackageId) {
- for id in native_lib_deps.into_iter() {
- debug!("looking up {} {:?}", id, kind);
- let output = &build_state[&(id.clone(), kind)];
+ current_id: &PackageId) -> CargoResult<()> {
+ for key in build_scripts.to_link.iter() {
+ let output = try!(build_state.get(key).chain_error(|| {
+ internal(format!("couldn't find build state for {}/{:?}",
+ key.0, key.1))
+ }));
for path in output.library_paths.iter() {
rustc.arg("-L").arg(path);
}
- if id == *current_id {
+ if key.0 == *current_id {
for cfg in &output.cfgs {
rustc.arg("--cfg").arg(cfg);
}
}
}
}
+ Ok(())
}
}
-fn load_build_deps(cx: &Context, pkg: &Package, target: &Target,
- profile: &Profile, kind: Kind) -> Vec<PackageId> {
- let pkg = cx.get_package(pkg.package_id());
- cx.build_scripts.get(&(pkg.package_id(), target, profile, kind)).map(|deps| {
- deps.iter().map(|&d| d.clone()).collect::<Vec<_>>()
- }).unwrap_or(Vec::new())
+fn load_build_deps(cx: &Context, unit: &Unit) -> Option<Arc<BuildScripts>> {
+ cx.build_scripts.get(unit).cloned()
}
// For all plugin dependencies, add their -L paths (now calculated and
// execute.
fn add_plugin_deps(rustc: &mut CommandPrototype,
build_state: &BuildMap,
- plugin_deps: Vec<PackageId>)
+ build_scripts: &BuildScripts)
-> CargoResult<()> {
let var = util::dylib_path_envvar();
let search_path = rustc.get_env(var).unwrap_or(OsString::new());
let mut search_path = env::split_paths(&search_path).collect::<Vec<_>>();
- for id in plugin_deps.into_iter() {
- debug!("adding libs for plugin dep: {}", id);
- let output = &build_state[&(id, Kind::Host)];
+ for id in build_scripts.plugins.iter() {
+ let key = (id.clone(), Kind::Host);
+ let output = try!(build_state.get(&key).chain_error(|| {
+ internal(format!("couldn't find libs for plugin dep {}", id))
+ }));
for path in output.library_paths.iter() {
search_path.push(path.clone());
}
Ok(())
}
-fn prepare_rustc(package: &Package, target: &Target, profile: &Profile,
+fn prepare_rustc(cx: &Context,
crate_types: Vec<&str>,
- cx: &Context, req: Platform)
- -> CargoResult<Vec<(CommandPrototype, Kind)>> {
- let mut base = try!(process(CommandType::Rustc, package, target, cx));
- build_base_args(cx, &mut base, package, target, profile, &crate_types);
-
- let mut targ_cmd = base.clone();
- let mut host_cmd = base;
- build_plugin_args(&mut targ_cmd, cx, package, target, Kind::Target);
- build_plugin_args(&mut host_cmd, cx, package, target, Kind::Host);
- try!(build_deps_args(&mut targ_cmd, target, profile, package, cx, Kind::Target));
- try!(build_deps_args(&mut host_cmd, target, profile, package, cx, Kind::Host));
-
- Ok(match req {
- Platform::Target => vec![(targ_cmd, Kind::Target)],
- Platform::Plugin => vec![(host_cmd, Kind::Host)],
- Platform::PluginAndTarget if cx.requested_target().is_none() => {
- vec![(targ_cmd, Kind::Target)]
- }
- Platform::PluginAndTarget => vec![(targ_cmd, Kind::Target),
- (host_cmd, Kind::Host)],
- })
+ unit: &Unit) -> CargoResult<CommandPrototype> {
+ let mut base = try!(process(CommandType::Rustc, unit.pkg, cx));
+ build_base_args(cx, &mut base, unit, &crate_types);
+ build_plugin_args(&mut base, cx, unit);
+ try!(build_deps_args(&mut base, cx, unit));
+ Ok(base)
}
-fn rustdoc(package: &Package, target: &Target, profile: &Profile,
- cx: &mut Context) -> CargoResult<Work> {
- let kind = Kind::Target;
- let mut rustdoc = try!(process(CommandType::Rustdoc, package, target, cx));
- rustdoc.arg(&root_path(cx, package, target))
+fn rustdoc(cx: &mut Context, unit: &Unit) -> CargoResult<Work> {
+ let mut rustdoc = try!(process(CommandType::Rustdoc, unit.pkg, cx));
+ rustdoc.arg(&root_path(cx, unit))
.cwd(cx.config.cwd())
- .arg("--crate-name").arg(&target.crate_name());
+ .arg("--crate-name").arg(&unit.target.crate_name());
let mut doc_dir = cx.config.target_dir(cx.get_package(cx.resolve.root()));
if let Some(target) = cx.requested_target() {
doc_dir.push("doc");
rustdoc.arg("-o").arg(doc_dir);
- match cx.resolve.features(package.package_id()) {
- Some(features) => {
- for feat in features {
- rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
- }
+ if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
+ for feat in features {
+ rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
}
- None => {}
}
- try!(build_deps_args(&mut rustdoc, target, profile, package, cx, kind));
+ try!(build_deps_args(&mut rustdoc, cx, unit));
- if package.has_custom_build() {
- rustdoc.env("OUT_DIR", &cx.layout(package, kind).build_out(package));
+ if unit.pkg.has_custom_build() {
+ rustdoc.env("OUT_DIR", &cx.layout(unit.pkg, unit.kind)
+ .build_out(unit.pkg));
}
- trace!("commands={}", rustdoc);
-
- let name = package.name().to_string();
+ let name = unit.pkg.name().to_string();
let desc = rustdoc.to_string();
let exec_engine = cx.exec_engine.clone();
// path is only actually relative if the current directory is an ancestor of it.
// This means that non-path dependencies (git/registry) will likely be shown as
// absolute paths instead of relative paths.
-fn root_path(cx: &Context, pkg: &Package, target: &Target) -> PathBuf {
- let absolute = pkg.root().join(target.src_path());
+fn root_path(cx: &Context, unit: &Unit) -> PathBuf {
+ let absolute = unit.pkg.root().join(unit.target.src_path());
let cwd = cx.config.cwd();
if absolute.starts_with(cwd) {
util::without_prefix(&absolute, cwd).map(|s| {
fn build_base_args(cx: &Context,
cmd: &mut CommandPrototype,
- pkg: &Package,
- target: &Target,
- profile: &Profile,
+ unit: &Unit,
crate_types: &[&str]) {
let Profile {
- opt_level, lto, codegen_units, ref rustc_args, debuginfo, debug_assertions,
- rpath, test, doc: _doc,
- } = *profile;
+ opt_level, lto, codegen_units, ref rustc_args, debuginfo,
+ debug_assertions, rpath, test, doc: _doc, run_custom_build,
+ } = *unit.profile;
+ assert!(!run_custom_build);
// Move to cwd so the root_path() passed below is actually correct
cmd.cwd(cx.config.cwd());
- // TODO: Handle errors in converting paths into args
- cmd.arg(&root_path(cx, pkg, target));
+ cmd.arg(&root_path(cx, unit));
- cmd.arg("--crate-name").arg(&target.crate_name());
+ cmd.arg("--crate-name").arg(&unit.target.crate_name());
for crate_type in crate_types.iter() {
cmd.arg("--crate-type").arg(crate_type);
}
- let prefer_dynamic = (target.for_host() && !target.is_custom_build()) ||
+ let prefer_dynamic = (unit.target.for_host() &&
+ !unit.target.is_custom_build()) ||
(crate_types.contains(&"dylib") &&
- pkg.package_id() != cx.resolve.root());
+ unit.pkg.package_id() != cx.resolve.root());
if prefer_dynamic {
cmd.arg("-C").arg("prefer-dynamic");
}
// Disable LTO for host builds as prefer_dynamic and it are mutually
// exclusive.
- if target.can_lto() && lto && !target.for_host() {
+ if unit.target.can_lto() && lto && !unit.target.for_host() {
cmd.args(&["-C", "lto"]);
} else {
// There are some restrictions with LTO and codegen-units, so we
// only add codegen units when LTO is not used.
- match codegen_units {
- Some(n) => { cmd.arg("-C").arg(&format!("codegen-units={}", n)); }
- None => {},
+ if let Some(n) = codegen_units {
+ cmd.arg("-C").arg(&format!("codegen-units={}", n));
}
}
cmd.args(&["-C", "debug-assertions=off"]);
}
- if test && target.harness() {
+ if test && unit.target.harness() {
cmd.arg("--test");
}
- match cx.resolve.features(pkg.package_id()) {
- Some(features) => {
- for feat in features.iter() {
- cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
- }
+ if let Some(features) = cx.resolve.features(unit.pkg.package_id()) {
+ for feat in features.iter() {
+ cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat));
}
- None => {}
}
- match cx.target_metadata(pkg, target, profile) {
- Some(m) => {
- cmd.arg("-C").arg(&format!("metadata={}", m.metadata));
- cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename));
- }
- None => {}
+ if let Some(m) = cx.target_metadata(unit) {
+ cmd.arg("-C").arg(&format!("metadata={}", m.metadata));
+ cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename));
}
if rpath {
}
-fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, pkg: &Package,
- target: &Target, kind: Kind) {
+fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit) {
fn opt(cmd: &mut CommandPrototype, key: &str, prefix: &str,
val: Option<&OsStr>) {
if let Some(val) = val {
}
}
- cmd.arg("--out-dir").arg(&cx.out_dir(pkg, kind, target));
+ cmd.arg("--out-dir").arg(&cx.out_dir(unit));
cmd.arg("--emit=dep-info,link");
- if kind == Kind::Target {
+ if unit.kind == Kind::Target {
opt(cmd, "--target", "", cx.requested_target().map(|s| s.as_ref()));
}
- opt(cmd, "-C", "ar=", cx.ar(kind).map(|s| s.as_ref()));
- opt(cmd, "-C", "linker=", cx.linker(kind).map(|s| s.as_ref()));
+ opt(cmd, "-C", "ar=", cx.ar(unit.kind).map(|s| s.as_ref()));
+ opt(cmd, "-C", "linker=", cx.linker(unit.kind).map(|s| s.as_ref()));
}
-fn build_deps_args(cmd: &mut CommandPrototype,
- target: &Target,
- profile: &Profile,
- package: &Package,
- cx: &Context,
- kind: Kind)
+fn build_deps_args(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit)
-> CargoResult<()> {
- let layout = cx.layout(package, kind);
+ let layout = cx.layout(unit.pkg, unit.kind);
cmd.arg("-L").arg(&{
let mut root = OsString::from("dependency=");
root.push(layout.root());
deps
});
- if package.has_custom_build() {
- cmd.env("OUT_DIR", &layout.build_out(package));
+ if unit.pkg.has_custom_build() {
+ cmd.env("OUT_DIR", &layout.build_out(unit.pkg));
}
- for (pkg, target, p) in cx.dep_targets(package, target, kind, profile) {
- if target.linkable() {
- try!(link_to(cmd, pkg, target, p, cx, kind));
+ for unit in cx.dep_targets(unit).iter() {
+ if unit.target.linkable() {
+ try!(link_to(cmd, cx, unit));
}
}
return Ok(());
- fn link_to(cmd: &mut CommandPrototype, pkg: &Package, target: &Target,
- profile: &Profile, cx: &Context, kind: Kind) -> CargoResult<()> {
- let kind = kind.for_target(target);
- let layout = cx.layout(pkg, kind);
+ fn link_to(cmd: &mut CommandPrototype, cx: &Context, unit: &Unit)
+ -> CargoResult<()> {
+ let layout = cx.layout(unit.pkg, unit.kind);
- for filename in try!(cx.target_filenames(pkg, target, profile, kind)).iter() {
+ for filename in try!(cx.target_filenames(unit)) {
if filename.ends_with(".a") { continue }
let mut v = OsString::new();
- v.push(&target.crate_name());
+ v.push(&unit.target.crate_name());
v.push("=");
v.push(layout.root());
v.push(&path::MAIN_SEPARATOR.to_string());
}
}
-pub fn process(cmd: CommandType, pkg: &Package, _target: &Target,
+pub fn process(cmd: CommandType, pkg: &Package,
cx: &Context) -> CargoResult<CommandPrototype> {
// When invoking a tool, we need the *host* deps directory in the dynamic
// library search path for plugins and such which have dynamic dependencies.
}
impl Kind {
- fn from(target: &Target) -> Kind {
- if target.for_host() {Kind::Host} else {Kind::Target}
- }
-
fn for_target(&self, target: &Target) -> Kind {
// Once we start compiling for the `Host` kind we continue doing so, but
// if we are a `Target` kind and then we start compiling for a target
fn compile_tests<'a>(manifest_path: &Path,
options: &TestOptions<'a>)
-> CargoResult<Compilation<'a>> {
- let mut compilation = try!(ops::compile(manifest_path, &options.compile_opts));
- for tests in compilation.tests.iter_mut() {
- tests.1.sort();
- }
-
+ let mut compilation = try!(ops::compile(manifest_path,
+ &options.compile_opts));
+ compilation.tests.sort_by(|a, b| {
+ (a.0.package_id(), &a.1).cmp(&(b.0.package_id(), &b.1))
+ });
Ok(compilation)
}
let mut errors = Vec::new();
- for &(ref pkg, ref tests) in &compilation.tests {
- for &(_, ref exe) in tests {
- let to_display = match util::without_prefix(exe, &cwd) {
- Some(path) => path,
- None => &**exe,
- };
- let mut cmd = try!(compilation.target_process(exe, pkg));
- cmd.args(test_args);
- try!(config.shell().concise(|shell| {
- shell.status("Running", to_display.display().to_string())
- }));
- try!(config.shell().verbose(|shell| {
- shell.status("Running", cmd.to_string())
- }));
-
- if let Err(e) = ExecEngine::exec(&mut ProcessEngine, cmd) {
- errors.push(e);
- if !options.no_fail_fast {
- break
- }
+ for &(ref pkg, _, ref exe) in &compilation.tests {
+ let to_display = match util::without_prefix(exe, &cwd) {
+ Some(path) => path,
+ None => &**exe,
+ };
+ let mut cmd = try!(compilation.target_process(exe, pkg));
+ cmd.args(test_args);
+ try!(config.shell().concise(|shell| {
+ shell.status("Running", to_display.display().to_string())
+ }));
+ try!(config.shell().verbose(|shell| {
+ shell.status("Running", cmd.to_string())
+ }));
+
+ if let Err(e) = ExecEngine::exec(&mut ProcessEngine, cmd) {
+ errors.push(e);
+ if !options.no_fail_fast {
+ break
}
}
}
try!(config.shell().status("Doc-tests", name));
let mut p = try!(compilation.rustdoc_process(package));
p.arg("--test").arg(lib)
- .arg("--crate-name").arg(&crate_name)
- .cwd(package.root());
+ .arg("--crate-name").arg(&crate_name);
for &rust_dep in &[&compilation.deps_output, &compilation.root_output] {
let mut arg = OsString::from("dependency=");
pub use self::cargo_compile::{compile, compile_pkg, CompileOptions};
pub use self::cargo_compile::{CompileFilter, CompileMode};
pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages};
-pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind};
+pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, Unit};
pub use self::cargo_rustc::{Context, LayoutProxy};
-pub use self::cargo_rustc::Platform;
pub use self::cargo_rustc::{BuildOutput, BuildConfig, TargetConfig};
pub use self::cargo_rustc::{CommandType, CommandPrototype, ExecEngine, ProcessEngine};
pub use self::cargo_run::run;
pub use self::Freshness::{Fresh, Dirty};
-pub struct DependencyQueue<K, V> {
+#[derive(Debug)]
+pub struct DependencyQueue<K: Eq + Hash, V> {
/// A list of all known keys to build.
///
/// The value of the hash map is list of dependencies which still need to be
profiles.and_then(|p| p.bench.as_ref())),
doc: merge(Profile::default_doc(),
profiles.and_then(|p| p.doc.as_ref())),
+ custom_build: Profile::default_custom_build(),
};
fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile {
rpath: rpath.unwrap_or(profile.rpath),
test: profile.test,
doc: profile.doc,
+ run_custom_build: profile.run_custom_build,
}
}
}
-use std::env;
-use std::fs::{self, File};
+use std::fs::File;
use std::io::prelude::*;
use support::{project, execs};
.file("build.rs", r#"
use std::env;
fn main() {
- assert_eq!(env::var("DEP_FOO_FOO").unwrap(), "bar");
- assert_eq!(env::var("DEP_FOO_BAR").unwrap(), "baz");
+ assert_eq!(env::var("DEP_FOO_FOO").ok().expect("FOO missing"),
+ "bar");
+ assert_eq!(env::var("DEP_FOO_BAR").ok().expect("BAR missing"),
+ "baz");
}
"#)
.file(".cargo/config", &format!(r#"
fn main() {
println!("cargo:foo=bar");
println!("cargo:bar=baz");
+ std::thread::sleep_ms(500);
}
"#);
a.build();
assert_that(p.cargo_process("build").arg("-v").arg("-j1"),
execs().with_status(0)
- .with_stdout(&format!("\
-[..]
-[..]
-[..]
-[..]
-{running} `[..]a-[..]build-script-build[..]`
+ .with_stdout_contains(&format!("\
{running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]`
{compiling} foo v0.5.0 (file://[..])
{running} `rustc [..] --crate-name foo [..] -L bar -L foo`
assert_that(p.cargo_process("build").arg("-v").arg("-j1"),
execs().with_status(0)
- .with_stdout(&format!("\
-[..]
-[..]
-[..]
-[..]
-{running} `[..]a-[..]build-script-build[..]`
+ .with_stdout_contains(&format!("\
{running} `rustc [..] --crate-name a [..]-L bar[..]-L foo[..]`
{compiling} foo v0.5.0 (file://[..])
{running} `rustc [..] --crate-name foo [..] -L bar -L foo`
path = "../b"
"#)
.file("b/src/lib.rs", "");
- assert_that(p.cargo_process("build"),
+ assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0));
});
"#);
assert_that(build.cargo_process("build"),
execs().with_status(0));
- let src = build.root().join("target/debug");
- let lib = fs::read_dir(&src).unwrap().map(|s| s.unwrap().path()).find(|lib| {
- let lib = lib.file_name().unwrap().to_str().unwrap();
- lib.starts_with(env::consts::DLL_PREFIX) &&
- lib.ends_with(env::consts::DLL_SUFFIX)
- }).unwrap();
- let libname = lib.file_name().unwrap().to_str().unwrap();
- let libname = &libname[env::consts::DLL_PREFIX.len()..
- libname.len() - env::consts::DLL_SUFFIX.len()];
let foo = project("foo")
.file("Cargo.toml", r#"
fn main() {
let src = PathBuf::from(env::var("SRC").unwrap());
- println!("cargo:rustc-flags=-L {}", src.parent().unwrap()
- .display());
+ println!("cargo:rustc-link-search={}/target/debug",
+ src.display());
}
"#)
- .file("bar/src/lib.rs", &format!(r#"
- pub fn bar() {{
- #[link(name = "{}")]
- extern {{ fn foo(); }}
- unsafe {{ foo() }}
- }}
- "#, libname));
+ .file("bar/src/lib.rs", r#"
+ pub fn bar() {
+ #[link(name = "builder")]
+ extern { fn foo(); }
+ unsafe { foo() }
+ }
+ "#);
- assert_that(foo.cargo_process("build").env("SRC", &lib),
+ assert_that(foo.cargo_process("build").env("SRC", build.root()),
execs().with_status(0));
});
println!("cargo:rustc-cfg=foo");
}
"#);
- assert_that(build.cargo_process("build"),
+ assert_that(build.cargo_process("build").arg("-v"),
execs().with_status(0));
});
assert_that(p.cargo("test").arg("-v").arg("-pb").arg("--lib"),
execs().with_status(0).with_stdout(&format!("\
+{fresh} a v0.5.0 ([..]
{compiling} b v0.5.0 ([..]
{running} `rustc b[..]src[..]lib.rs [..] -L test[..]`
-{fresh} a v0.5.0 ([..]
{running} `[..]b-[..]`
running 0 tests
}}
"#, libname));
- assert_that(foo.cargo_process("build").env("SRC", &lib),
+ assert_that(foo.cargo_process("build").env("SRC", &lib).arg("-v"),
execs().with_status(0));
});
.file("src/lib.rs", "")
.file("tests/it_works.rs", "");
- assert_that(p.cargo_process("test"),
+ assert_that(p.cargo_process("test").arg("-v"),
execs().with_status(0));
});
"#, alternate_arch()));
let target = alternate();
- assert_that(p.cargo_process("build").arg("--target").arg(&target),
+ assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"),
execs().with_status(0));
assert_that(&p.target_bin(&target, "foo"), existing_file());
{running} `rustc d1[..]build.rs [..] --out-dir {dir}[..]target[..]build[..]d1-[..]`
{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build`
{running} `{dir}[..]target[..]build[..]d1-[..]build-script-build`
-{running} `rustc d1[..]src[..]lib.rs [..] --target {target} [..] \
- -L /path/to/{target}`
-{running} `rustc d1[..]src[..]lib.rs [..] \
- -L /path/to/{host}`
+{running} `rustc d1[..]src[..]lib.rs [..]`
+{running} `rustc d1[..]src[..]lib.rs [..]`
{compiling} d2 v0.0.0 ({url})
{running} `rustc d2[..]src[..]lib.rs [..] \
-L /path/to/{host}`
_ => panic!("unknown platform")
}};
- env::var(expected).unwrap();
- env::var(not_expected).unwrap_err();
+ env::var(expected).ok()
+ .expect(&format!("missing {{}}", expected));
+ env::var(not_expected).err()
+ .expect(&format!("found {{}}", not_expected));
}}
"#, host = host, target = target))
.file("src/lib.rs", "")
assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0));
assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target),
execs().with_status(0));
-});
\ No newline at end of file
+});
assert_that(p.cargo("test").arg("-v"),
execs().with_status(101));
});
+
+// Checks that `cargo test --no-run` after a plain `cargo build` does not
+// rebuild the already-compiled dependency `a`: the asserted stdout lists
+// exactly `c`, `b`, and `foo` as Compiling, so `a` must stay fresh.
+test!(no_rebuild_transitive_target_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            a = { path = "a" }
+            [dev-dependencies]
+            b = { path = "b" }
+        "#)
+        .file("src/lib.rs", "")
+        .file("tests/foo.rs", "")
+        // `a` depends on `c` only for the target platform literally named
+        // "foo" — presumably never active on the build host (TODO confirm),
+        // so `c` is not built as part of `a`'s own dependency graph.
+        .file("a/Cargo.toml", r#"
+            [package]
+            name = "a"
+            version = "0.0.1"
+            authors = []
+
+            [target.foo.dependencies]
+            c = { path = "../c" }
+        "#)
+        .file("a/src/lib.rs", "")
+        // The dev-dependency `b` pulls in `c` unconditionally, so `c` first
+        // gets compiled when tests (and thus `b`) are built below.
+        .file("b/Cargo.toml", r#"
+            [package]
+            name = "b"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies]
+            c = { path = "../c" }
+        "#)
+        .file("b/src/lib.rs", "")
+        .file("c/Cargo.toml", r#"
+            [package]
+            name = "c"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("c/src/lib.rs", "");
+
+    // First build compiles `a` and `foo` (no dev-dependencies).
+    assert_that(p.cargo_process("build"),
+                execs().with_status(0));
+    // Building tests must only add the dev-dep graph; an exact stdout match
+    // (with_stdout, not a substring check) proves `a` is not recompiled.
+    assert_that(p.cargo("test").arg("--no-run"),
+                execs().with_status(0)
+                       .with_stdout(&format!("\
+{compiling} c v0.0.1 ([..])
+{compiling} b v0.0.1 ([..])
+{compiling} foo v0.0.1 ([..])
+", compiling = COMPILING)));
+});
+use std::fs::File;
+use std::io::prelude::*;
use std::str;
+use std::thread;
use support::{project, execs, basic_bin_manifest, basic_lib_manifest};
use support::{COMPILING, RUNNING, DOCTEST};
test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
", running = RUNNING)));
});
+
+// Checks that touching only `src/main.rs` and re-running `cargo test
+// --no-run` rebuilds just the binary (once as a bin, once as a test
+// executable — two `rustc src[..]main.rs` lines) and nothing else: the
+// exact stdout match admits no extra Compiling/Running lines for the
+// library or the integration test.
+test!(bin_does_not_rebuild_tests {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("src/lib.rs", "")
+        .file("src/main.rs", "fn main() {}")
+        .file("tests/foo.rs", "");
+    p.build();
+
+    assert_that(p.cargo("test").arg("-v"),
+                execs().with_status(0));
+
+    // Sleep past coarse filesystem mtime granularity so the rewritten
+    // main.rs is reliably seen as newer than the first build's outputs.
+    thread::sleep_ms(1000);
+    File::create(&p.root().join("src/main.rs")).unwrap()
+        .write_all(b"fn main() { 3; }").unwrap();
+
+    // Use `src[..]main.rs` (not a literal `/`) so the pattern also matches
+    // the `\` path separator on Windows, consistent with every other rustc
+    // invocation pattern in this file.
+    assert_that(p.cargo("test").arg("-v").arg("--no-run"),
+                execs().with_status(0)
+                       .with_stdout(&format!("\
+{compiling} foo v0.0.1 ([..])
+{running} `rustc src[..]main.rs [..]`
+{running} `rustc src[..]main.rs [..]`
+", compiling = COMPILING, running = RUNNING)));
+});